diff --git a/.claude/commands/gla:add-material.md b/.claude/commands/gla:add-material.md deleted file mode 100644 index 4e36e9f..0000000 --- a/.claude/commands/gla:add-material.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:add-material — Image Material System - -Read and follow the instructions in skills/gla:add-material.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:brand-logo.md b/.claude/commands/gla:brand-logo.md deleted file mode 100644 index 9f55213..0000000 --- a/.claude/commands/gla:brand-logo.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:brand-logo — Apply Channel Branding (Intro + Outro + Logo + 4K Badge) - -Read and follow the instructions in skills/gla:brand-logo.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:camera-guide.md b/.claude/commands/gla:camera-guide.md deleted file mode 100644 index 4b4a9a2..0000000 --- a/.claude/commands/gla:camera-guide.md +++ /dev/null @@ -1,5 +0,0 @@ -Camera Guide — Cinematic Video Prompts (Veo 3) - -Read and follow the instructions in skills/gla:camera-guide.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:change-model.md b/.claude/commands/gla:change-model.md deleted file mode 100644 index a18a089..0000000 --- a/.claude/commands/gla:change-model.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:change-model — View & Change Video/Image Model Keys - -Read and follow the instructions in skills/gla:change-model.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:concat-fit-narrator.md b/.claude/commands/gla:concat-fit-narrator.md deleted file mode 100644 index 3628eb5..0000000 --- a/.claude/commands/gla:concat-fit-narrator.md +++ /dev/null @@ -1,5 +0,0 @@ -Trim each scene video to fit its TTS narrator duration, burn text overlays, then concatenate into a final video. 
- -Read and follow the instructions in skills/gla:concat-fit-narrator.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:concat.md b/.claude/commands/gla:concat.md deleted file mode 100644 index f291197..0000000 --- a/.claude/commands/gla:concat.md +++ /dev/null @@ -1,5 +0,0 @@ -Download and concatenate all scene videos into a single video with optional TTS narration. - -Read and follow the instructions in skills/gla:concat.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:create-project.md b/.claude/commands/gla:create-project.md deleted file mode 100644 index 5d7c989..0000000 --- a/.claude/commands/gla:create-project.md +++ /dev/null @@ -1,5 +0,0 @@ -Create a new Google Flow video project. Ask the user for: - -Read and follow the instructions in skills/gla:create-project.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:creative-mix.md b/.claude/commands/gla:creative-mix.md deleted file mode 100644 index 83d35c3..0000000 --- a/.claude/commands/gla:creative-mix.md +++ /dev/null @@ -1,5 +0,0 @@ -Creative video mixing — combine techniques for cinematic results. - -Read and follow the instructions in skills/gla:creative-mix.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:fix-uuids.md b/.claude/commands/gla:fix-uuids.md deleted file mode 100644 index 41c1dde..0000000 --- a/.claude/commands/gla:fix-uuids.md +++ /dev/null @@ -1,5 +0,0 @@ -Find and fix any non-UUID media_ids (CAMS... format) across all scenes and entities. - -Read and follow the instructions in skills/gla:fix-uuids.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-chain-videos.md b/.claude/commands/gla:gen-chain-videos.md deleted file mode 100644 index c0483be..0000000 --- a/.claude/commands/gla:gen-chain-videos.md +++ /dev/null @@ -1,5 +0,0 @@ -Generate videos with automatic scene chaining (start+end frame transitions). 
- -Read and follow the instructions in skills/gla:gen-chain-videos.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-images.md b/.claude/commands/gla:gen-images.md deleted file mode 100644 index a3668f9..0000000 --- a/.claude/commands/gla:gen-images.md +++ /dev/null @@ -1,5 +0,0 @@ -Generate scene images for all scenes in a video. - -Read and follow the instructions in skills/gla:gen-images.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-narrator.md b/.claude/commands/gla:gen-narrator.md deleted file mode 100644 index 987c6ca..0000000 --- a/.claude/commands/gla:gen-narrator.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:gen-narrator — Generate Narrator Text + TTS for All Scenes - -Read and follow the instructions in skills/gla:gen-narrator.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-refs.md b/.claude/commands/gla:gen-refs.md deleted file mode 100644 index 5d6098d..0000000 --- a/.claude/commands/gla:gen-refs.md +++ /dev/null @@ -1,5 +0,0 @@ -Generate reference images for all entities in a project. - -Read and follow the instructions in skills/gla:gen-refs.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-tts-template.md b/.claude/commands/gla:gen-tts-template.md deleted file mode 100644 index ac615c4..0000000 --- a/.claude/commands/gla:gen-tts-template.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:gen-tts-template — Generate Voice Template - -Read and follow the instructions in skills/gla:gen-tts-template.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:gen-videos.md b/.claude/commands/gla:gen-videos.md deleted file mode 100644 index 382b2bd..0000000 --- a/.claude/commands/gla:gen-videos.md +++ /dev/null @@ -1,5 +0,0 @@ -Generate videos for all scenes in a video. 
- -Read and follow the instructions in skills/gla:gen-videos.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:insert-scene.md b/.claude/commands/gla:insert-scene.md deleted file mode 100644 index dc90c7f..0000000 --- a/.claude/commands/gla:insert-scene.md +++ /dev/null @@ -1,5 +0,0 @@ -Insert new scene(s) into an existing video chain — for multi-angle shots, cutaways, or close-ups. - -Read and follow the instructions in skills/gla:insert-scene.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:research.md b/.claude/commands/gla:research.md deleted file mode 100644 index a1942e4..0000000 --- a/.claude/commands/gla:research.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:research — Fact-Check & Research Before Scripting - -Read and follow the instructions in skills/gla:research.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:status.md b/.claude/commands/gla:status.md deleted file mode 100644 index 84679b7..0000000 --- a/.claude/commands/gla:status.md +++ /dev/null @@ -1,5 +0,0 @@ -Show full status dashboard for a project. - -Read and follow the instructions in skills/gla:status.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:switch-project.md b/.claude/commands/gla:switch-project.md deleted file mode 100644 index 04fa55b..0000000 --- a/.claude/commands/gla:switch-project.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:switch-project — Switch Active Project - -Read and follow the instructions in skills/gla:switch-project.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:thumbnail.md b/.claude/commands/gla:thumbnail.md deleted file mode 100644 index 0fa6c95..0000000 --- a/.claude/commands/gla:thumbnail.md +++ /dev/null @@ -1,5 +0,0 @@ -Generate 4 YouTube-optimized thumbnail variants for a project video. 
- -Read and follow the instructions in skills/gla:thumbnail.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:youtube-seo.md b/.claude/commands/gla:youtube-seo.md deleted file mode 100644 index a065c84..0000000 --- a/.claude/commands/gla:youtube-seo.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:youtube-seo — Generate YouTube Metadata (SEO-Optimized) - -Read and follow the instructions in skills/gla:youtube-seo.md - -Arguments: $ARGUMENTS diff --git a/.claude/commands/gla:youtube-upload.md b/.claude/commands/gla:youtube-upload.md deleted file mode 100644 index f778f3b..0000000 --- a/.claude/commands/gla:youtube-upload.md +++ /dev/null @@ -1,5 +0,0 @@ -gla:youtube-upload — Upload Video to YouTube (Shorts + Long-form) - -Read and follow the instructions in skills/gla:youtube-upload.md - -Arguments: $ARGUMENTS diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml new file mode 100644 index 0000000..fdcd621 --- /dev/null +++ b/.github/workflows/build-macos.yml @@ -0,0 +1,86 @@ +name: Build FlowKit Desktop (macOS) + +on: + push: + tags: + - 'v*' + workflow_dispatch: + inputs: + version: + description: 'Version tag (e.g. 
v0.2.0)' + required: false + +jobs: + build-macos: + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install pyinstaller + + - name: Bundle Python agent with PyInstaller + shell: bash + run: | + rm -rf desktop/resources/agent + mkdir -p desktop/resources/agent + pyinstaller \ + --noconfirm \ + --onefile \ + --name flowkit-agent \ + --distpath desktop/resources/agent \ + --workpath /tmp/flowkit-build \ + --add-data "agent:agent" \ + --hidden-import uvicorn \ + --hidden-import uvicorn.logging \ + --hidden-import uvicorn.loops \ + --hidden-import uvicorn.loops.auto \ + --hidden-import uvicorn.protocols \ + --hidden-import uvicorn.protocols.http \ + --hidden-import uvicorn.protocols.http.auto \ + --hidden-import uvicorn.protocols.websockets \ + --hidden-import uvicorn.protocols.websockets.auto \ + --hidden-import uvicorn.lifespan \ + --hidden-import uvicorn.lifespan.on \ + --hidden-import websockets \ + agent/main.py + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: desktop/package-lock.json + + - name: Install npm dependencies + working-directory: desktop + run: npm install --legacy-peer-deps + + - name: Build Electron app (macOS) + working-directory: desktop + run: npm run build:mac + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload DMG artifact + uses: actions/upload-artifact@v4 + with: + name: flowkit-macos-installer + path: desktop/dist/*.dmg + retention-days: 30 + + - name: Upload to GitHub Release + if: startsWith(github.ref, 'refs/tags/') + uses: softprops/action-gh-release@v1 + with: + files: desktop/dist/*.dmg + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/build-windows.yml 
b/.github/workflows/build-windows.yml new file mode 100644 index 0000000..0cc0cb8 --- /dev/null +++ b/.github/workflows/build-windows.yml @@ -0,0 +1,90 @@ +name: Build FlowKit Desktop (Windows) + +on: + push: + tags: + - 'v*' + workflow_dispatch: + inputs: + version: + description: 'Version tag (e.g. v0.2.0)' + required: false + +jobs: + build-windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install pyinstaller + + - name: Bundle Python agent with PyInstaller + shell: bash + run: | + rm -rf desktop/resources/agent + mkdir -p desktop/resources/agent + pyinstaller \ + --noconfirm \ + --onefile \ + --name flowkit-agent \ + --distpath desktop/resources/agent \ + --workpath /tmp/flowkit-build \ + --add-data "agent;agent" \ + --hidden-import uvicorn \ + --hidden-import uvicorn.logging \ + --hidden-import uvicorn.loops \ + --hidden-import uvicorn.loops.auto \ + --hidden-import uvicorn.protocols \ + --hidden-import uvicorn.protocols.http \ + --hidden-import uvicorn.protocols.http.auto \ + --hidden-import uvicorn.protocols.websockets \ + --hidden-import uvicorn.protocols.websockets.auto \ + --hidden-import uvicorn.lifespan \ + --hidden-import uvicorn.lifespan.on \ + --hidden-import websockets \ + agent/main.py + if [ -f desktop/resources/agent/flowkit-agent ] && [ ! 
-f desktop/resources/agent/flowkit-agent.exe ]; then + mv desktop/resources/agent/flowkit-agent desktop/resources/agent/flowkit-agent.exe + fi + test -f desktop/resources/agent/flowkit-agent.exe + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: desktop/package-lock.json + + - name: Install npm dependencies + working-directory: desktop + run: npm install --legacy-peer-deps + + - name: Build Electron app (Windows) + working-directory: desktop + run: npm run build:win + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload installer artifact + uses: actions/upload-artifact@v4 + with: + name: flowkit-windows-installer + path: desktop/dist/*.exe + retention-days: 30 + + - name: Upload to GitHub Release + if: startsWith(github.ref, 'refs/tags/') + uses: softprops/action-gh-release@v1 + with: + files: desktop/dist/*.exe + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 465b817..9196d10 100644 --- a/.gitignore +++ b/.gitignore @@ -8,10 +8,27 @@ __pycache__/ output/ youtube agent/active_project.json +.pytest_cache/ +venv/ # Generated AI tool configs (run: python setup.py) .claude/commands/fk:* .gemini/commands/ AGENTS.md GEMINI.md -.fk-setup.json \ No newline at end of file +.fk-setup.json + +# OS / local machine +.DS_Store +.agent/ + +# Node / Electron build artifacts +node_modules/ +cloudflare-license/node_modules/ +desktop/node_modules/ +desktop/dist/ +desktop/dist-electron/ +desktop/out/ +desktop/resources/agent/ +build-artifacts/ +release-assets/ diff --git a/README.md b/README.md index e54184e..2c249a5 100644 --- a/README.md +++ b/README.md @@ -607,6 +607,18 @@ Each channel has a rules file controlling upload scheduling and SEO: Upload validation checks: max per day, min gap between uploads, avoid dead hours. Auto-detects Short (<61s + vertical 9:16) vs Long-form. 
+## Desktop License Activation (Commercial) + +FlowKit desktop now supports machine-bound activation: + +- app shows a stable `Machine ID` at startup +- only devices activated in CMS can use the app +- plans supported: `1M`, `3M`, `6M`, `1Y`, `LIFE` + +Deployment + CMS guide: + +- [docs/license-cloudflare.md](docs/license-cloudflare.md) + ## Troubleshooting | Problem | Solution | diff --git a/agent/api/active_project.py b/agent/api/active_project.py index b0b0389..e7d9120 100644 --- a/agent/api/active_project.py +++ b/agent/api/active_project.py @@ -8,6 +8,7 @@ from fastapi import APIRouter, HTTPException from agent.db import crud +from agent.utils.orientation import normalize_orientation router = APIRouter(prefix="/api/active-project", tags=["active-project"]) logger = logging.getLogger(__name__) @@ -55,12 +56,15 @@ async def get_active_project(): if project: # Enrich with video info videos = await crud.list_videos(project_id=project["id"]) - video = videos[0] if videos else None + state_video_id = state.get("video_id") + video = next((v for v in videos if v["id"] == state_video_id), None) if state_video_id else None + if not video and videos: + video = videos[0] return { "project_id": project["id"], "project_name": project["name"], "video_id": video["id"] if video else None, - "orientation": video.get("orientation") if video else None, + "orientation": normalize_orientation(video.get("orientation")) if video else None, "material": project.get("material"), "status": project.get("status"), "source": "explicit", @@ -81,7 +85,7 @@ async def get_active_project(): "project_id": project["id"], "project_name": project["name"], "video_id": video["id"] if video else None, - "orientation": video.get("orientation") if video else None, + "orientation": normalize_orientation(video.get("orientation")) if video else None, "material": project.get("material"), "status": project.get("status"), "source": "fallback_most_recent", @@ -92,6 +96,7 @@ async def get_active_project(): async 
def set_active_project(body: dict): """Set the active project by project_id.""" project_id = body.get("project_id") + video_id = body.get("video_id") if not project_id: raise HTTPException(status_code=400, detail="project_id is required") @@ -99,16 +104,31 @@ async def set_active_project(body: dict): if not project: raise HTTPException(status_code=404, detail=f"Project {project_id} not found") - _write_state({"project_id": project_id}) - logger.info("Active project set: %s (%s)", project["name"], project_id[:8]) - videos = await crud.list_videos(project_id=project_id) - video = videos[0] if videos else None + video = None + if video_id: + video = next((v for v in videos if v["id"] == video_id), None) + if not video: + raise HTTPException(status_code=400, detail=f"video_id {video_id} does not belong to project {project_id}") + if not video and videos: + video = videos[0] + + state = {"project_id": project_id} + if video: + state["video_id"] = video["id"] + _write_state(state) + logger.info( + "Active project set: %s (%s) video=%s", + project["name"], + project_id[:8], + (video["id"][:8] if video else "none"), + ) + return { "project_id": project["id"], "project_name": project["name"], "video_id": video["id"] if video else None, - "orientation": video.get("orientation") if video else None, + "orientation": normalize_orientation(video.get("orientation")) if video else None, "material": project.get("material"), "status": project.get("status"), "source": "explicit", diff --git a/agent/api/characters.py b/agent/api/characters.py index 9b87502..8819692 100644 --- a/agent/api/characters.py +++ b/agent/api/characters.py @@ -1,6 +1,7 @@ from fastapi import APIRouter, HTTPException from agent.models.character import Character, CharacterCreate, CharacterUpdate from agent.sdk.persistence.sqlite_repository import SQLiteRepository +from agent.services.event_bus import event_bus from agent.utils.slugify import slugify router = APIRouter(prefix="/characters", tags=["characters"]) 
@@ -13,7 +14,9 @@ def _get_repo() -> SQLiteRepository: @router.post("", response_model=Character) async def create(body: CharacterCreate): repo = _get_repo() - return await repo.create_character(**body.model_dump(exclude_none=True)) + char = await repo.create_character(**body.model_dump(exclude_none=True)) + await event_bus.emit("character_created", {"id": char.id, "name": char.name}) + return char @router.get("", response_model=list[Character]) @@ -41,7 +44,9 @@ async def update(cid: str, body: CharacterUpdate): row = await repo.update("character", cid, **updates) if not row: raise HTTPException(404, "Character not found") - return repo._row_to_character(row) + char = repo._row_to_character(row) + await event_bus.emit("character_updated", {"id": char.id, "name": char.name}) + return char @router.delete("/{cid}") @@ -49,4 +54,5 @@ async def delete(cid: str): repo = _get_repo() if not await repo.delete_character(cid): raise HTTPException(404, "Character not found") + await event_bus.emit("character_deleted", {"id": cid}) return {"ok": True} diff --git a/agent/api/flow.py b/agent/api/flow.py index 1373e11..63b7eac 100644 --- a/agent/api/flow.py +++ b/agent/api/flow.py @@ -1,10 +1,28 @@ """Direct Flow API endpoints — for manual operations outside the queue.""" -from fastapi import APIRouter, HTTPException -from pydantic import BaseModel -from typing import Optional +from __future__ import annotations + +import base64 +import logging +import mimetypes +import uuid +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Optional + +from fastapi import APIRouter, HTTPException, Query +from fastapi.responses import FileResponse +from pydantic import BaseModel, Field + +from agent.config import OUTPUT_DIR +from agent.db import crud +from agent.materials import get_material +from agent.sdk.services.result_handler import parse_result from agent.services.flow_client import get_flow_client router = APIRouter(prefix="/flow", tags=["flow"]) 
+logger = logging.getLogger(__name__) +_MANUAL_PROJECT_ID: str | None = None +_LOCAL_MEDIA_ROOT = OUTPUT_DIR.resolve() class GenerateImageRequest(BaseModel): @@ -13,6 +31,7 @@ class GenerateImageRequest(BaseModel): aspect_ratio: str = "IMAGE_ASPECT_RATIO_PORTRAIT" user_paygate_tier: str = "PAYGATE_TIER_ONE" character_media_ids: Optional[list[str]] = None + image_model_key: Optional[str] = None class GenerateVideoRequest(BaseModel): @@ -23,6 +42,7 @@ class GenerateVideoRequest(BaseModel): aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT" end_image_media_id: Optional[str] = None user_paygate_tier: str = "PAYGATE_TIER_ONE" + video_model_key: Optional[str] = None class GenerateVideoRefsRequest(BaseModel): @@ -32,6 +52,7 @@ class GenerateVideoRefsRequest(BaseModel): scene_id: str aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT" user_paygate_tier: str = "PAYGATE_TIER_ONE" + video_model_key: Optional[str] = None class UpscaleVideoRequest(BaseModel): @@ -57,25 +78,329 @@ class EditImageRequest(BaseModel): project_id: str aspect_ratio: str = "IMAGE_ASPECT_RATIO_PORTRAIT" user_paygate_tier: str = "PAYGATE_TIER_ONE" + image_model_key: Optional[str] = None + + +class ManualContextRequest(BaseModel): + project_id: Optional[str] = None + create_if_missing: bool = True + user_paygate_tier: Optional[str] = None + + +class ManualImageItem(BaseModel): + prompt: str + aspect_ratio: Optional[str] = None + style: Optional[str] = None + character_media_ids: Optional[list[str]] = None + image_model_key: Optional[str] = None + + +class ManualImageBatchRequest(BaseModel): + project_id: Optional[str] = None + material: Optional[str] = "realistic" + custom_style: Optional[str] = None + aspect_ratio: str = "IMAGE_ASPECT_RATIO_PORTRAIT" + user_paygate_tier: Optional[str] = None + image_model_key: Optional[str] = None + items: list[ManualImageItem] = Field(default_factory=list) + + +class ManualVideoItem(BaseModel): + prompt: str + start_image_media_id: str + end_image_media_id: Optional[str] 
= None + scene_id: Optional[str] = None + aspect_ratio: Optional[str] = None + style: Optional[str] = None + video_model_key: Optional[str] = None + + +class ManualVideoBatchRequest(BaseModel): + project_id: Optional[str] = None + material: Optional[str] = "realistic" + custom_style: Optional[str] = None + aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT" + user_paygate_tier: Optional[str] = None + video_model_key: Optional[str] = None + items: list[ManualVideoItem] = Field(default_factory=list) + + +def _extract_first_url(payload: Any) -> str | None: + def _is_direct_media_url(url: str) -> bool: + low = (url or "").lower() + if not low.startswith("http"): + return False + if "media.getmediaurlredirect" in low: + return False + if low.startswith("https://flow-content.google/"): + return True + if low.startswith("https://storage.googleapis.com/"): + return True + if "googleusercontent.com/" in low: + return True + return False + + def _extract_any_url(node: Any) -> str | None: + if isinstance(node, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = node.get(key) + if isinstance(value, str) and value.startswith("http"): + return value + for value in node.values(): + found = _extract_any_url(value) + if found: + return found + return None + if isinstance(node, list): + for item in node: + found = _extract_any_url(item) + if found: + return found + return None + + if isinstance(payload, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = payload.get(key) + if isinstance(value, str) and _is_direct_media_url(value): + return value + for value in payload.values(): + found = _extract_first_url(value) + if found: + return found + return _extract_any_url(payload) + if isinstance(payload, list): + for item in payload: + found = _extract_first_url(item) + if found: + return found + return _extract_any_url(payload) + return None + + +def _walk_values(node: Any): + if isinstance(node, dict): + yield node + 
for value in node.values(): + yield from _walk_values(value) + return + if isinstance(node, list): + for item in node: + yield from _walk_values(item) + + +def _extract_project_id_from_flow_response(flow_result: dict) -> str | None: + payload = flow_result.get("data", flow_result) + candidates: list[str] = [] + for obj in _walk_values(payload): + if not isinstance(obj, dict): + continue + pid = obj.get("projectId") + if isinstance(pid, str) and pid.strip(): + candidates.append(pid.strip()) + unique = list(dict.fromkeys(candidates)) + if not unique: + return None + if len(unique) > 1: + logger.warning("Multiple projectId candidates in Flow response, using first: %s", unique) + return unique[0] + + +def _extract_flow_error_text(flow_result: dict) -> str | None: + payload = flow_result.get("data", flow_result) + for obj in _walk_values(payload): + if not isinstance(obj, dict): + continue + err = obj.get("error") + if isinstance(err, str) and err.strip(): + return err.strip() + if isinstance(err, dict): + msg = err.get("message") + if isinstance(msg, str) and msg.strip(): + return msg.strip() + err_json = err.get("json") + if isinstance(err_json, dict): + nested = err_json.get("message") + if isinstance(nested, str) and nested.strip(): + return nested.strip() + return None + + +def _is_internal_image_error(flow_result: dict, parsed_error: str | None) -> bool: + candidates: list[str] = [] + if parsed_error: + candidates.append(parsed_error) + nested_error = _extract_flow_error_text(flow_result) + if nested_error: + candidates.append(nested_error) + raw_error = flow_result.get("error") + if isinstance(raw_error, str) and raw_error.strip(): + candidates.append(raw_error.strip()) + + merged = " | ".join(candidates).lower() + return "internal error" in merged or "internal error encountered" in merged + + +def _normalize_text(value: Optional[str]) -> str: + if not value: + return "" + return " ".join(value.strip().split()) + + +def _compose_prompt(base_prompt: str, 
*style_parts: Optional[str]) -> str: + base = _normalize_text(base_prompt) + style = " ".join(part for part in (_normalize_text(p) for p in style_parts) if part) + if style and base: + return f"{style}. {base}" + return style or base + + +def _resolve_material_prefix(material_id: Optional[str]) -> str: + if not material_id: + return "" + material = get_material(material_id) + if not material: + raise HTTPException(400, f"Unknown material: '{material_id}'") + return _normalize_text(material.get("scene_prefix") or material.get("style_instruction") or "") + + +def _extract_operations(raw: dict) -> list[dict]: + data = raw.get("data", raw) + if isinstance(data, dict): + ops = data.get("operations") + if isinstance(ops, list): + return ops + return [] + + +def _flow_status_code(value: Any, default: int = 502) -> int: + try: + code = int(value) + except Exception: + return default + return code if 400 <= code <= 599 else default + + +def _flow_error_detail(payload: Any) -> str: + if isinstance(payload, str): + return payload + if isinstance(payload, dict): + err = payload.get("error") + if isinstance(err, str) and err.strip(): + return err.strip() + nested = payload.get("data") + if isinstance(nested, str) and nested.strip(): + return nested.strip() + if nested is not None: + return str(nested) + return str(payload) + + +def _derive_video_submit_status(operations: list[dict], parse_success: bool) -> str: + if not operations: + return "COMPLETED" if parse_success else "FAILED" + statuses = [op.get("status") for op in operations if isinstance(op, dict)] + if any(s == "MEDIA_GENERATION_STATUS_FAILED" for s in statuses): + return "FAILED" + if statuses and all(s == "MEDIA_GENERATION_STATUS_SUCCESSFUL" for s in statuses): + return "COMPLETED" + return "SUBMITTED" + + +def _resolve_local_media_path(raw_path: str) -> Path: + if not raw_path: + raise HTTPException(400, "path is required") + candidate = Path(raw_path).expanduser() + if not candidate.is_absolute(): + raise 
HTTPException(400, "path must be absolute") + try: + resolved = candidate.resolve(strict=True) + except FileNotFoundError: + raise HTTPException(404, f"Local media not found: {raw_path}") + except Exception: + raise HTTPException(400, "Invalid local media path") + + try: + resolved.relative_to(_LOCAL_MEDIA_ROOT) + except ValueError: + raise HTTPException(403, "Access denied for requested path") + + if not resolved.is_file(): + raise HTTPException(404, "Local media is not a file") + return resolved + + +async def _resolve_user_tier(client, preferred: Optional[str]) -> str: + if preferred and preferred.strip(): + return preferred.strip() + try: + result = await client.get_credits() + data = result.get("data", result) + tier = data.get("userPaygateTier") + if isinstance(tier, str) and tier.strip(): + return tier.strip() + except Exception: + pass + return "PAYGATE_TIER_ONE" + + +async def _resolve_manual_project_id(client, requested_project_id: Optional[str], create_if_missing: bool = True) -> str: + global _MANUAL_PROJECT_ID + + if requested_project_id and requested_project_id.strip(): + _MANUAL_PROJECT_ID = requested_project_id.strip() + return _MANUAL_PROJECT_ID + + if _MANUAL_PROJECT_ID: + return _MANUAL_PROJECT_ID + + if not create_if_missing: + raise HTTPException(400, "No project available. 
Provide project_id or create one first.") + + title = f"FlowKit Manual {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')}" + flow_result = await client.create_project(title, "PINHOLE") + if flow_result.get("error"): + raise HTTPException(502, f"Flow createProject error: {flow_result['error']}") + + status = flow_result.get("status") + if isinstance(status, int) and status >= 400: + detail = _extract_flow_error_text(flow_result) or "Unknown Flow error" + raise HTTPException(502, f"Flow createProject failed (HTTP {status}): {detail}") + + pid = _extract_project_id_from_flow_response(flow_result) + if not pid: + detail = _extract_flow_error_text(flow_result) + if detail: + raise HTTPException(502, f"Failed to parse Flow createProject response: {detail}") + raise HTTPException(502, "Failed to parse Flow createProject response: projectId not found") + + _MANUAL_PROJECT_ID = pid + logger.info("Manual flow project created: %s", pid) + return pid @router.get("/status") async def extension_status(): """Check if extension is connected.""" client = get_flow_client() - return { - "connected": client.connected, - "flow_key_present": client._flow_key is not None, - } + status = await client.get_extension_status() + return status + + +@router.get("/local-media") +async def local_media(path: str = Query(..., description="Absolute local path under output/")): + """Serve local generated media files to Electron renderer safely.""" + resolved = _resolve_local_media_path(path) + mime = mimetypes.guess_type(str(resolved))[0] or "application/octet-stream" + return FileResponse(path=resolved, media_type=mime, filename=resolved.name) @router.get("/credits") -async def get_credits(): +async def get_credits(force: bool = Query(False)): """Get user credits from Google Flow.""" client = get_flow_client() if not client.connected: raise HTTPException(503, "Extension not connected") - result = await client.get_credits() + result = await client.get_credits(force=force) if 
result.get("error"): raise HTTPException(502, result["error"]) return result.get("data", result) @@ -154,22 +479,47 @@ async def refresh_project_urls(project_id: str): @router.get("/media/{media_id}") -async def get_media(media_id: str): +async def get_media( + media_id: str, + project_id: Optional[str] = Query( + default=None, + description="Optional Flow project_id context. Pass app project ID to avoid cross-project lookup.", + ), +): """Get media metadata + fresh signed URL from Google Flow. Returns the raw response which should contain a fresh fifeUrl/servingUri. Use this to refresh expired GCS signed URLs. """ client = get_flow_client() + local_url = await client.find_local_media_url(media_id, project_id=project_id) + if local_url: + return {"url": local_url, "fifeUrl": local_url, "servingUri": local_url, "source": "local_cache"} if not client.connected: raise HTTPException(503, "Extension not connected") - result = await client.get_media(media_id) + + result = await client.get_media(media_id, project_id=project_id) if result.get("error"): raise HTTPException(502, result["error"]) status = result.get("status", 200) if isinstance(status, int) and status >= 400: raise HTTPException(status, result.get("data", "Media not found")) - return result.get("data", result) + payload = result.get("data", result) + fresh_url = _extract_first_url(payload) + if isinstance(fresh_url, str) and "media.getMediaUrlRedirect" in fresh_url: + fresh_url = None + if isinstance(fresh_url, str): + local_cached = await client.cache_media_locally(media_id, fresh_url, project_id=project_id) + if isinstance(local_cached, str) and local_cached: + fresh_url = local_cached + if isinstance(payload, dict): + merged = dict(payload) + if fresh_url: + merged["url"] = fresh_url + return merged + if fresh_url: + return {"data": payload, "url": fresh_url} + return payload @router.post("/edit-image") @@ -182,6 +532,7 @@ async def edit_image(body: EditImageRequest): body.prompt, body.source_media_id, 
body.project_id, aspect_ratio=body.aspect_ratio, user_paygate_tier=body.user_paygate_tier, + image_model_key=body.image_model_key, ) if result.get("error") or (isinstance(result.get("status"), int) and result["status"] >= 400): raise HTTPException(result.get("status", 502), result.get("error", result.get("data"))) @@ -191,7 +542,6 @@ async def edit_image(body: EditImageRequest): @router.post("/upload-image") async def upload_image(body: UploadImageRequest): """Upload a local image file to Google Flow and get a media_id.""" - import base64, mimetypes client = get_flow_client() if not client.connected: raise HTTPException(503, "Extension not connected") @@ -202,8 +552,215 @@ async def upload_image(body: UploadImageRequest): raise HTTPException(404, f"File not found: {body.file_path}") b64 = base64.b64encode(image_bytes).decode() mime = mimetypes.guess_type(body.file_path)[0] or "image/png" - result = await client.upload_image(b64, mime_type=mime, project_id=body.project_id, file_name=body.file_name) + try: + result = await client.upload_image(b64, mime_type=mime, project_id=body.project_id, file_name=body.file_name) + except Exception as exc: + raise HTTPException(502, f"Flow upload request failed: {exc}") + + if not isinstance(result, dict): + raise HTTPException(502, f"Unexpected upload response type: {type(result).__name__}") + if result.get("error") or (isinstance(result.get("status"), int) and result["status"] >= 400): - raise HTTPException(result.get("status", 502), result.get("error", result.get("data"))) + raise HTTPException(_flow_status_code(result.get("status"), 502), _flow_error_detail(result)) media_id = result.get("_mediaId") - return {"media_id": media_id, "raw": result.get("data", result)} + if not media_id: + raise HTTPException(502, "Upload succeeded but no media_id returned") + + media_url: str | None = None + try: + media_resp = await client.get_media( + media_id, + project_id=(body.project_id or None), + ) + if not media_resp.get("error"): + 
media_url = _extract_first_url(media_resp.get("data", media_resp)) + except Exception as exc: + # Non-fatal: upload is already successful; URL can be refreshed later. + logger.warning("upload_image get_media failed for %s: %s", media_id, exc) + + return {"media_id": media_id, "url": media_url, "raw": result.get("data", result)} + + +@router.post("/manual/context") +async def manual_context(body: ManualContextRequest): + """Resolve runtime context for standalone manual generation pages.""" + client = get_flow_client() + if not client.connected: + raise HTTPException(503, "Extension not connected") + project_id = await _resolve_manual_project_id(client, body.project_id, create_if_missing=body.create_if_missing) + tier = await _resolve_user_tier(client, body.user_paygate_tier) + return { + "project_id": project_id, + "user_paygate_tier": tier, + "project_source": "manual_cache_or_active", + } + + +@router.post("/manual/images") +async def manual_generate_images(body: ManualImageBatchRequest): + """Generate standalone images from many prompts (not tied to scene/request queue).""" + client = get_flow_client() + if not client.connected: + raise HTTPException(503, "Extension not connected") + if not body.items: + raise HTTPException(400, "items is required") + + project_id = await _resolve_manual_project_id(client, body.project_id, create_if_missing=True) + tier = await _resolve_user_tier(client, body.user_paygate_tier) + material_prefix = _resolve_material_prefix(body.material) + shared_custom_style = _normalize_text(body.custom_style) + + items_out: list[dict] = [] + for index, item in enumerate(body.items): + full_prompt = _compose_prompt(item.prompt, material_prefix, shared_custom_style, item.style) + if not full_prompt: + items_out.append({ + "index": index, + "status": "FAILED", + "error": "Prompt is empty after style merge", + "prompt": item.prompt, + "full_prompt": full_prompt, + "media_id": None, + "url": None, + }) + continue + + selected_image_model = 
_normalize_text(item.image_model_key or body.image_model_key) or None + raw = await client.generate_images( + prompt=full_prompt, + project_id=project_id, + aspect_ratio=item.aspect_ratio or body.aspect_ratio, + user_paygate_tier=tier, + character_media_ids=item.character_media_ids, + image_model_key=selected_image_model, + ) + parsed = parse_result(raw, "GENERATE_IMAGE") + + # Flow đôi khi trả "Internal error encountered" khi model key không phù hợp. + # Thử lại 1 lần bằng model mặc định để tăng tỷ lệ thành công. + retried_with_default_model = False + if ( + not parsed.success + and selected_image_model + and _is_internal_image_error(raw, parsed.error) + ): + retry_raw = await client.generate_images( + prompt=full_prompt, + project_id=project_id, + aspect_ratio=item.aspect_ratio or body.aspect_ratio, + user_paygate_tier=tier, + character_media_ids=item.character_media_ids, + image_model_key=None, + ) + retry_parsed = parse_result(retry_raw, "GENERATE_IMAGE") + retried_with_default_model = True + if retry_parsed.success: + raw = retry_raw + parsed = retry_parsed + selected_image_model = None + else: + retry_error_text = retry_parsed.error or _extract_flow_error_text(retry_raw) + if retry_error_text: + parsed.error = retry_error_text + + url = parsed.url or _extract_first_url(raw.get("data", raw)) + items_out.append({ + "index": index, + "status": "COMPLETED" if parsed.success else "FAILED", + "error": parsed.error if not parsed.success else None, + "prompt": item.prompt, + "full_prompt": full_prompt, + "aspect_ratio": item.aspect_ratio or body.aspect_ratio, + "image_model_key": selected_image_model, + "retried_with_default_model": retried_with_default_model, + "media_id": parsed.media_id, + "url": url, + }) + + return { + "project_id": project_id, + "user_paygate_tier": tier, + "material": body.material, + "custom_style": body.custom_style, + "image_model_key": _normalize_text(body.image_model_key) or None, + "total": len(items_out), + "items": items_out, + } + + 
+@router.post("/manual/videos") +async def manual_generate_videos(body: ManualVideoBatchRequest): + """Submit standalone videos from prompts + start/end frame media IDs.""" + client = get_flow_client() + if not client.connected: + raise HTTPException(503, "Extension not connected") + if not body.items: + raise HTTPException(400, "items is required") + + project_id = await _resolve_manual_project_id(client, body.project_id, create_if_missing=True) + tier = await _resolve_user_tier(client, body.user_paygate_tier) + material_prefix = _resolve_material_prefix(body.material) + shared_custom_style = _normalize_text(body.custom_style) + + items_out: list[dict] = [] + for index, item in enumerate(body.items): + full_prompt = _compose_prompt(item.prompt, material_prefix, shared_custom_style, item.style) + if not full_prompt: + items_out.append({ + "index": index, + "status": "FAILED", + "error": "Prompt is empty after style merge", + "prompt": item.prompt, + "full_prompt": full_prompt, + "operations": [], + "media_id": None, + "url": None, + }) + continue + + selected_video_model = _normalize_text(item.video_model_key or body.video_model_key) or None + scene_id = item.scene_id or str(uuid.uuid4()) + raw = await client.generate_video( + start_image_media_id=item.start_image_media_id, + prompt=full_prompt, + project_id=project_id, + scene_id=scene_id, + aspect_ratio=item.aspect_ratio or body.aspect_ratio, + end_image_media_id=item.end_image_media_id, + user_paygate_tier=tier, + video_model_key=selected_video_model, + ) + + parsed = parse_result(raw, "GENERATE_VIDEO") + operations = _extract_operations(raw) + status = _derive_video_submit_status(operations, parsed.success) + url = parsed.url or _extract_first_url(raw.get("data", raw)) + error_text = parsed.error + if status == "FAILED" and not error_text: + error_text = _extract_flow_error_text(raw) or "Video generation failed" + + items_out.append({ + "index": index, + "status": status, + "error": error_text, + "prompt": 
item.prompt, + "full_prompt": full_prompt, + "scene_id": scene_id, + "aspect_ratio": item.aspect_ratio or body.aspect_ratio, + "video_model_key": selected_video_model, + "start_image_media_id": item.start_image_media_id, + "end_image_media_id": item.end_image_media_id, + "operations": operations, + "media_id": parsed.media_id, + "url": url, + }) + + return { + "project_id": project_id, + "user_paygate_tier": tier, + "material": body.material, + "custom_style": body.custom_style, + "video_model_key": _normalize_text(body.video_model_key) or None, + "total": len(items_out), + "items": items_out, + } diff --git a/agent/api/models.py b/agent/api/models.py index aea9312..1793107 100644 --- a/agent/api/models.py +++ b/agent/api/models.py @@ -3,7 +3,7 @@ import logging from pathlib import Path -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException from agent import config @@ -34,6 +34,31 @@ def _reload_config(data: dict): config.IMAGE_MODELS.update(data["image_models"]) +def _validate_video_models(data: dict): + """Reject explicit aspect/model mismatches (portrait mapped to landscape key and vice-versa).""" + video_models = data.get("video_models") or {} + for tier, gen_types in video_models.items(): + if not isinstance(gen_types, dict): + continue + for gen_type, ratios in gen_types.items(): + if not isinstance(ratios, dict): + continue + for ratio_key, model_key in ratios.items(): + if not isinstance(model_key, str): + continue + low = model_key.lower() + if ratio_key == "VIDEO_ASPECT_RATIO_PORTRAIT" and "landscape" in low and "portrait" not in low: + raise HTTPException( + 400, + f"Invalid model mapping: {tier}/{gen_type}/{ratio_key} uses landscape model key '{model_key}'", + ) + if ratio_key == "VIDEO_ASPECT_RATIO_LANDSCAPE" and "portrait" in low and "landscape" not in low: + raise HTTPException( + 400, + f"Invalid model mapping: {tier}/{gen_type}/{ratio_key} uses portrait model key '{model_key}'", + ) + + @router.get("") async def get_models(): 
"""Return current model configuration.""" @@ -74,6 +99,7 @@ async def patch_models(body: dict): current[section][tier][gen_type] = {} current[section][tier][gen_type].update(ratios) + _validate_video_models(current) _write_models(current) _reload_config(current) logger.info("Models updated and hot-reloaded: %s", list(body.keys())) diff --git a/agent/api/projects.py b/agent/api/projects.py index c87a76b..8761ffd 100644 --- a/agent/api/projects.py +++ b/agent/api/projects.py @@ -2,6 +2,7 @@ import logging import re from datetime import datetime, timezone +from typing import Any import aiohttp from fastapi import APIRouter, HTTPException @@ -12,7 +13,9 @@ from agent.models.character import Character from agent.sdk.persistence.sqlite_repository import SQLiteRepository from agent.services.flow_client import get_flow_client +from agent.services.event_bus import event_bus from agent.utils.slugify import slugify +from agent.utils.orientation import normalize_orientation logger = logging.getLogger(__name__) @@ -129,6 +132,60 @@ def _get_repo() -> SQLiteRepository: return SQLiteRepository() +def _walk_values(node: Any): + if isinstance(node, dict): + yield node + for value in node.values(): + yield from _walk_values(value) + return + if isinstance(node, list): + for item in node: + yield from _walk_values(item) + + +def _extract_project_id_from_flow_response(flow_result: dict) -> str | None: + """Extract projectId from multiple known/legacy tRPC response shapes.""" + payload = flow_result.get("data", flow_result) + + candidates: list[str] = [] + for obj in _walk_values(payload): + if not isinstance(obj, dict): + continue + pid = obj.get("projectId") + if isinstance(pid, str) and pid.strip(): + candidates.append(pid.strip()) + + # Preserve order while de-duplicating. 
+ unique = list(dict.fromkeys(candidates)) + if not unique: + return None + if len(unique) > 1: + logger.warning("Multiple projectId candidates in Flow response, using first: %s", unique) + return unique[0] + + +def _extract_flow_error_text(flow_result: dict) -> str | None: + payload = flow_result.get("data", flow_result) + for obj in _walk_values(payload): + if not isinstance(obj, dict): + continue + # tRPC errors are often under error.json.message + err = obj.get("error") + if isinstance(err, str) and err.strip(): + return err.strip() + if isinstance(err, dict): + for key in ("message",): + msg = err.get(key) + if isinstance(msg, str) and msg.strip(): + return msg.strip() + err_json = err.get("json") + if isinstance(err_json, dict): + msg = err_json.get("message") + if isinstance(msg, str) and msg.strip(): + return msg.strip() + return None + + @router.post("", response_model=Project) async def create(body: ProjectCreate): from agent.materials import get_material @@ -158,13 +215,18 @@ async def create(body: ProjectCreate): if flow_result.get("error"): raise HTTPException(502, f"Flow API error: {flow_result['error']}") - try: - data = flow_result.get("data", {}) - result = data["result"]["data"]["json"]["result"] - flow_project_id = result["projectId"] - except (KeyError, TypeError) as e: + status = flow_result.get("status") + if isinstance(status, int) and status >= 400: + err_msg = _extract_flow_error_text(flow_result) or "Unknown Flow error" + raise HTTPException(502, f"Flow createProject failed (HTTP {status}): {err_msg}") + + flow_project_id = _extract_project_id_from_flow_response(flow_result) + if not flow_project_id: logger.error("Unexpected Flow response: %s", flow_result) - raise HTTPException(502, f"Failed to parse Flow response: {e}") + err_msg = _extract_flow_error_text(flow_result) + if err_msg: + raise HTTPException(502, f"Flow createProject failed: {err_msg}") + raise HTTPException(502, "Failed to parse Flow response: projectId not found") 
logger.info("Flow project created: %s", flow_project_id) @@ -184,6 +246,7 @@ async def create(body: ProjectCreate): language=create_data.get("language", "en"), user_paygate_tier=detected_tier, material=material_id, + orientation=normalize_orientation(create_data.get("orientation", "VERTICAL")), allow_music=create_data.get("allow_music", False), allow_voice=create_data.get("allow_voice", False), ) @@ -212,6 +275,7 @@ async def create(body: ProjectCreate): await repo.link_character_to_project(flow_project_id, char.id) logger.info("%s '%s' created and linked: %s", etype, char_input["name"], char.id) + await event_bus.emit("project_created", {"id": project.id, "name": project.name}) return project @@ -234,10 +298,15 @@ async def get(pid: str): @router.patch("/{pid}", response_model=Project) async def update(pid: str, body: ProjectUpdate): repo = _get_repo() - row = await repo.update("project", pid, **body.model_dump(exclude_unset=True)) + update_data = body.model_dump(exclude_unset=True) + if update_data.get("orientation"): + update_data["orientation"] = normalize_orientation(update_data["orientation"]) + row = await repo.update("project", pid, **update_data) if not row: raise HTTPException(404, "Project not found") - return repo._row_to_project(row) + project = repo._row_to_project(row) + await event_bus.emit("project_updated", {"id": project.id, "name": project.name}) + return project @router.delete("/{pid}") @@ -245,6 +314,7 @@ async def delete(pid: str): repo = _get_repo() if not await repo.delete_project(pid): raise HTTPException(404, "Project not found") + await event_bus.emit("project_deleted", {"id": pid}) return {"ok": True} @@ -253,6 +323,7 @@ async def link_character(pid: str, cid: str): repo = _get_repo() if not await repo.link_character_to_project(pid, cid): raise HTTPException(400, "Failed to link character") + await event_bus.emit("character_linked", {"project_id": pid, "character_id": cid}) return {"ok": True} @@ -261,6 +332,7 @@ async def 
unlink_character(pid: str, cid: str): repo = _get_repo() if not await repo.unlink_character_from_project(pid, cid): raise HTTPException(404, "Link not found") + await event_bus.emit("character_unlinked", {"project_id": pid, "character_id": cid}) return {"ok": True} @@ -293,8 +365,10 @@ async def get_output_dir(pid: str): scenes = await repo.list_scenes(video_id) scene_count = len(scenes) if scenes else 0 - # Orientation lives on the video table, not project - video_orientation = (getattr(video, "orientation", None) if video else None) or "VERTICAL" + # Prefer video orientation when a video exists, otherwise project orientation. + video_orientation = normalize_orientation( + (getattr(video, "orientation", None) if video else None) or getattr(project, "orientation", None) or "VERTICAL" + ) now = datetime.now(timezone.utc).isoformat() meta = { @@ -332,8 +406,12 @@ class ThumbnailRequest(BaseModel): class ThumbnailResponse(BaseModel): success: bool media_id: str | None = None + # API contract (new) image_url: str | None = None output_path: str | None = None + # Backward-compatible aliases expected by older UI + url: str | None = None + local_path: str | None = None prompt: str | None = None error: str | None = None @@ -423,5 +501,7 @@ async def generate_thumbnail(pid: str, body: ThumbnailRequest): media_id=gen_result.media_id, image_url=gen_result.url, output_path=str(output_path), + url=gen_result.url, + local_path=str(output_path), prompt=full_prompt, ) diff --git a/agent/api/requests.py b/agent/api/requests.py index 6c1b5f3..470a30f 100644 --- a/agent/api/requests.py +++ b/agent/api/requests.py @@ -1,12 +1,128 @@ from fastapi import APIRouter, HTTPException from pydantic import BaseModel from typing import Optional +from datetime import datetime, timezone from agent.models.request import Request, RequestCreate from agent.models.enums import StatusType from agent.db import crud +from agent.utils.orientation import normalize_orientation router = 
APIRouter(prefix="/requests", tags=["requests"]) +_TYPE_ALIASES: dict[str, set[str]] = { + "GENERATE_CHARACTER_IMAGE": {"GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"}, + "GENERATE_IMAGE": {"GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE"}, + "GENERATE_VIDEO": {"GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"}, + "UPSCALE_VIDEO": {"UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"}, +} + +_STAGE_BY_TYPE: dict[str, str] = { + "GENERATE_CHARACTER_IMAGE": "character_image", + "REGENERATE_CHARACTER_IMAGE": "character_image", + "EDIT_CHARACTER_IMAGE": "character_image", + "GENERATE_IMAGE": "scene_image", + "REGENERATE_IMAGE": "scene_image", + "EDIT_IMAGE": "scene_image", + "GENERATE_VIDEO": "scene_video", + "REGENERATE_VIDEO": "scene_video", + "GENERATE_VIDEO_REFS": "scene_video", + "UPSCALE_VIDEO": "scene_upscale", + "UPSCALE_VIDEO_LOCAL": "scene_upscale", +} + + +def _expand_types(type_filter: str | None) -> set[str] | None: + if not type_filter: + return None + return _TYPE_ALIASES.get(type_filter, {type_filter}) + + +def _request_stage_key(row: dict) -> str: + """Build a stable key to collapse retries/regenerations into latest stage state.""" + req_type = row.get("type") + stage = _STAGE_BY_TYPE.get(req_type, req_type or "unknown") + orientation = normalize_orientation(row.get("orientation")) if row.get("orientation") else "NONE" + scene_id = row.get("scene_id") + character_id = row.get("character_id") + if scene_id: + return f"scene:{scene_id}:{stage}:{orientation}" + if character_id: + return f"character:{character_id}:{stage}" + return f"request:{row.get('id')}" + + +def _latest_rows_per_stage(rows: list[dict]) -> list[dict]: + """Keep only latest request per logical stage key (scene+stage+orientation or character+stage).""" + latest: dict[str, dict] = {} + for row in rows: + key = _request_stage_key(row) + prev = latest.get(key) + if not prev: + latest[key] = row + continue + prev_ts = prev.get("updated_at") or 
prev.get("created_at") or "" + cur_ts = row.get("updated_at") or row.get("created_at") or "" + if cur_ts >= prev_ts: + latest[key] = row + return list(latest.values()) + + +def _parse_utc(ts: str | None) -> datetime | None: + if not ts: + return None + try: + return datetime.strptime(ts, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc) + except Exception: + return None + + +def _status_hint(error_message: str | None) -> str | None: + if not error_message: + return None + em = error_message.lower() + if "public_error_unsafe_generation" in em or "unsafe_generation" in em: + return "Prompt bị bộ lọc an toàn của Google Flow chặn. Hãy giảm nội dung nhạy cảm/bạo lực rồi tạo lại." + if ( + "flow_tab_not_ready" in em + or "no_flow_tab" in em + or "no flow tab" in em + or "flow tab not ready" in em + or "grecaptcha not available" in em + or "token expired" in em + ): + return "Flow tab chưa sẵn sàng. Hãy mở đúng trang Google Flow rồi thử lại." + if "captcha_failed" in em or "recaptcha" in em or "captcha" in em: + if "must request permission to access the respective host" in em or "cannot access contents of the page" in em: + return "Captcha failed: extension không truy cập được đúng tab Flow." + return "Captcha failed: đang chờ retry." + if "extension not connected" in em or "extension disconnected" in em: + return "Extension mất kết nối." + if "local_upscale_setup_required" in em: + return "Thiếu công cụ upscale local (ffmpeg/ffprobe/realesrgan/model). Cần cấu hình trước khi chạy 4K local." + if "no local source video available for local upscale" in em: + return "Upscale local cần video nguồn đã lưu trên máy. Hãy tải video local trước rồi chạy lại." + if "dispatch timeout" in em and "upscale" in em: + return "Upscale local bị timeout khi xử lý nặng. Hệ thống đã dừng request để tránh treo máy." 
+ return error_message + + +async def _nudge_pending_request_now(row: dict) -> dict: + """Force an existing pending request (possibly in retry wait) to run ASAP.""" + if not row: + return row + if row.get("status") != "PENDING": + return row + now = datetime.now(timezone.utc) + next_retry = _parse_utc(row.get("next_retry_at")) + if next_retry and next_retry > now: + updated = await crud.update_request( + row["id"], + next_retry_at=None, + error_message="manual retry now", + ) + return updated or row + return row + class RequestUpdate(BaseModel): status: Optional[StatusType] = None @@ -23,18 +139,99 @@ class BatchRequestCreate(BaseModel): class BatchStatus(BaseModel): total: int pending: int + queued_pending: int = 0 + retry_waiting: int = 0 processing: int completed: int failed: int done: bool all_succeeded: bool orientation: Optional[str] = None + next_retry_at: Optional[str] = None + next_retry_in_sec: Optional[int] = None + status_hint: Optional[str] = None + oldest_processing_sec: Optional[int] = None + + +async def _validate_request_scope(data: dict) -> dict: + """Ensure request references stay inside the declared project/video scope. + + This prevents accidental cross-project generation when stale IDs are mixed. + Returns possibly enriched data (auto-filled project_id/video_id from scene). 
+ """ + project_id = data.get("project_id") + video_id = data.get("video_id") + scene_id = data.get("scene_id") + character_id = data.get("character_id") + + if project_id: + project = await crud.get_project(project_id) + if not project: + raise HTTPException(404, f"Project not found: {project_id}") + + video = None + if video_id: + video = await crud.get_video(video_id) + if not video: + raise HTTPException(404, f"Video not found: {video_id}") + if project_id and video.get("project_id") != project_id: + raise HTTPException( + 400, + f"video_id {video_id} does not belong to project_id {project_id}", + ) + + if scene_id: + scene = await crud.get_scene(scene_id) + if not scene: + raise HTTPException(404, f"Scene not found: {scene_id}") + + if video_id and scene.get("video_id") != video_id: + raise HTTPException( + 400, + f"scene_id {scene_id} does not belong to video_id {video_id}", + ) + + if not video: + video = await crud.get_video(scene.get("video_id")) + if not video: + raise HTTPException( + 404, f"Video not found for scene_id {scene_id}: {scene.get('video_id')}" + ) + + if project_id and video.get("project_id") != project_id: + raise HTTPException( + 400, + f"scene_id {scene_id} does not belong to project_id {project_id}", + ) + + data.setdefault("video_id", scene.get("video_id")) + data.setdefault("project_id", video.get("project_id")) + project_id = data.get("project_id") + + if character_id: + char = await crud.get_character(character_id) + if not char: + raise HTTPException(404, f"Character not found: {character_id}") + if project_id: + linked = await crud.get_project_characters(project_id) + if not any(c.get("id") == character_id for c in linked): + raise HTTPException( + 400, + f"character_id {character_id} does not belong to project_id {project_id}", + ) + + return data @router.post("", response_model=Request) async def create(body: RequestCreate): data = body.model_dump(exclude_none=True) data["req_type"] = data.pop("type") + if data["req_type"] == 
"UPSCALE_VIDEO": + data["req_type"] = "UPSCALE_VIDEO_LOCAL" + if data.get("orientation"): + data["orientation"] = normalize_orientation(data["orientation"]) + data = await _validate_request_scope(data) # Reject if there's already an active request for the same scene + type scene_id = data.get("scene_id") @@ -45,10 +242,12 @@ async def create(body: RequestCreate): if r.get("type") == req_type and r.get("status") in ("PENDING", "PROCESSING")] if active: + reused = active[0] + reused = await _nudge_pending_request_now(reused) raise HTTPException( 409, f"Active {req_type} request already exists for scene {scene_id[:8]} " - f"(status={active[0]['status']}, id={active[0]['id'][:8]})" + f"(status={reused['status']}, id={reused['id'][:8]})" ) # Auto-set video orientation (symmetric with batch endpoint) @@ -64,18 +263,21 @@ async def create(body: RequestCreate): async def create_batch(body: BatchRequestCreate): """Submit multiple requests atomically. Server handles throttling (max 5 concurrent, 10s cooldown). 
Duplicate active requests for the same scene+type are skipped (not errors).""" - # Auto-set video orientation from the batch (tracks current active orientation) + results = [] _seen_vids: set[str] = set() for item in body.requests: - vid = item.video_id - orient = item.orientation + data = item.model_dump(exclude_none=True) + data["req_type"] = data.pop("type") + if data["req_type"] == "UPSCALE_VIDEO": + data["req_type"] = "UPSCALE_VIDEO_LOCAL" + if data.get("orientation"): + data["orientation"] = normalize_orientation(data["orientation"]) + data = await _validate_request_scope(data) + vid = data.get("video_id") + orient = data.get("orientation") if vid and orient and vid not in _seen_vids: _seen_vids.add(vid) await crud.update_video(vid, orientation=orient) - results = [] - for item in body.requests: - data = item.model_dump(exclude_none=True) - data["req_type"] = data.pop("type") scene_id = data.get("scene_id") character_id = data.get("character_id") req_type = data.get("req_type") @@ -86,7 +288,7 @@ async def create_batch(body: BatchRequestCreate): if r.get("type") == req_type and r.get("status") in ("PENDING", "PROCESSING")] if active: - results.append(active[0]) + results.append(await _nudge_pending_request_now(active[0])) continue if character_id and req_type: existing = await crud.list_requests(project_id=data.get("project_id")) @@ -95,7 +297,7 @@ async def create_batch(body: BatchRequestCreate): and r.get("type") == req_type and r.get("status") in ("PENDING", "PROCESSING")] if active: - results.append(active[0]) + results.append(await _nudge_pending_request_now(active[0])) continue results.append(await crud.create_request(**data)) return results @@ -119,27 +321,103 @@ async def batch_status(video_id: str = None, project_id: str = None, """Aggregate status for all requests matching the filter. 
Poll this instead of polling N individual request IDs.""" rows = await crud.list_requests(video_id=video_id, project_id=project_id) - if type: - rows = [r for r in rows if r.get("type") == type] + type_filter = _expand_types(type) + if type_filter: + rows = [r for r in rows if r.get("type") in type_filter] if orientation: - rows = [r for r in rows if r.get("orientation") == orientation] + normalized_orientation = normalize_orientation(orientation) + rows = [r for r in rows if normalize_orientation(r.get("orientation")) == normalized_orientation] + rows = _latest_rows_per_stage(rows) counts = {"PENDING": 0, "PROCESSING": 0, "COMPLETED": 0, "FAILED": 0} + now = datetime.now(timezone.utc) + queued_pending = 0 + retry_waiting = 0 + next_retry_ts: datetime | None = None + hint: str | None = None + oldest_processing_sec: int | None = None + processing_types: set[str] = set() + for r in rows: s = r.get("status", "PENDING") counts[s] = counts.get(s, 0) + 1 + if s == "PROCESSING": + rt = r.get("type") + if isinstance(rt, str) and rt: + processing_types.add(rt) + updated_at = _parse_utc(r.get("updated_at")) or _parse_utc(r.get("created_at")) + if updated_at: + age = max(0, int((now - updated_at).total_seconds())) + if oldest_processing_sec is None or age > oldest_processing_sec: + oldest_processing_sec = age + continue + if s != "PENDING": + continue + nr = _parse_utc(r.get("next_retry_at")) + if nr and nr > now: + retry_waiting += 1 + if not next_retry_ts or nr < next_retry_ts: + next_retry_ts = nr + hint = _status_hint(r.get("error_message")) + else: + queued_pending += 1 total = len(rows) + next_retry_at = next_retry_ts.strftime("%Y-%m-%dT%H:%M:%SZ") if next_retry_ts else None + next_retry_in_sec = None + if next_retry_ts: + next_retry_in_sec = max(0, int((next_retry_ts - now).total_seconds())) + if not hint and oldest_processing_sec is not None: + upscale_types = {"UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"} + if processing_types and processing_types.issubset(upscale_types): 
+ if oldest_processing_sec >= 300: + hint = ( + f"Upscale local đang chạy ({oldest_processing_sec}s). " + "Tác vụ này có thể mất vài phút tùy độ dài clip." + ) + elif oldest_processing_sec > 120: + hint = f"Có request đang PROCESSING lâu ({oldest_processing_sec}s), có thể đang kẹt captcha/tab Flow." return BatchStatus( total=total, pending=counts["PENDING"], + queued_pending=queued_pending, + retry_waiting=retry_waiting, processing=counts["PROCESSING"], orientation=orientation, completed=counts["COMPLETED"], failed=counts["FAILED"], done=(counts["PENDING"] == 0 and counts["PROCESSING"] == 0), all_succeeded=(counts["COMPLETED"] == total and total > 0), + next_retry_at=next_retry_at, + next_retry_in_sec=next_retry_in_sec, + status_hint=hint, + oldest_processing_sec=oldest_processing_sec, ) +@router.get("/failed") +async def list_failed(video_id: str = None, project_id: str = None, type: str = None, orientation: str = None): + """Return failed request details (scene_id, error_message, type) for error display.""" + rows = await crud.list_requests(video_id=video_id, project_id=project_id) + type_filter = _expand_types(type) + if type_filter: + rows = [r for r in rows if r.get("type") in type_filter] + if orientation: + normalized = normalize_orientation(orientation) + rows = [r for r in rows if normalize_orientation(r.get("orientation")) == normalized] + rows = [r for r in _latest_rows_per_stage(rows) if r.get("status") == "FAILED"] + return [ + { + "id": r.get("id"), + "scene_id": r.get("scene_id"), + "character_id": r.get("character_id"), + "type": r.get("type"), + "error_message": r.get("error_message") or "Lỗi không xác định", + "retry_count": r.get("retry_count", 0), + "updated_at": r.get("updated_at"), + } + for r in rows + ] + + @router.get("/{rid}", response_model=Request) async def get(rid: str): r = await crud.get_request(rid) diff --git a/agent/api/scenes.py b/agent/api/scenes.py index df8e3b7..e54eb25 100644 --- a/agent/api/scenes.py +++ 
b/agent/api/scenes.py @@ -1,6 +1,10 @@ from fastapi import APIRouter, HTTPException from agent.models.scene import Scene, SceneCreate, SceneUpdate from agent.sdk.persistence.sqlite_repository import SQLiteRepository +from agent.services.event_bus import event_bus +from agent.utils.paths import scene_tts_path +from agent.utils.slugify import slugify +from agent.config import OUTPUT_DIR import json router = APIRouter(prefix="/scenes", tags=["scenes"]) @@ -8,7 +12,29 @@ _repo = SQLiteRepository() -def _scene_to_flat(sdk_scene) -> dict: +async def _project_slug_for_video(video_id: str) -> str | None: + video = await _repo.get_video(video_id) + if not video: + return None + project = await _repo.get_project(video.project_id) + if not project: + return None + return slugify(project.name) + + +def _resolve_tts_audio_path(project_slug: str | None, scene_id: str, display_order: int) -> str | None: + if not project_slug: + return None + tts_file = scene_tts_path(project_slug, display_order, scene_id) + if tts_file.exists(): + return str(tts_file) + legacy_tts = OUTPUT_DIR / project_slug / "tts" / f"{scene_id}.wav" + if legacy_tts.exists(): + return str(legacy_tts) + return None + + +def _scene_to_flat(sdk_scene, project_slug: str | None = None) -> dict: """Convert SDK Scene domain model to flat dict matching API response shape.""" repo = SQLiteRepository() flat = repo._scene_to_updates(sdk_scene) @@ -19,7 +45,12 @@ def _scene_to_flat(sdk_scene) -> dict: flat["transition_prompt"] = sdk_scene.transition_prompt flat["chain_type"] = sdk_scene.chain_type flat["source"] = sdk_scene.source + # API contract uses list[str], not JSON string. 
flat["character_names"] = sdk_scene.character_names + tts_audio_path = _resolve_tts_audio_path(project_slug, sdk_scene.id, sdk_scene.display_order) + narrator_text = (sdk_scene.narrator_text or "").strip() + flat["tts_audio_path"] = tts_audio_path + flat["tts_status"] = "COMPLETED" if tts_audio_path else ("PENDING" if narrator_text else "PENDING") flat["created_at"] = sdk_scene.created_at flat["updated_at"] = sdk_scene.updated_at return flat @@ -57,13 +88,21 @@ async def create(body: SceneCreate): await _repo.update("scene", s.id, display_order=s.display_order + 1) sdk_scene = await _repo.create_scene(**data) - return _scene_to_flat(sdk_scene) + project_slug = await _project_slug_for_video(sdk_scene.video_id) + scene = _scene_to_flat(sdk_scene, project_slug) + await event_bus.emit("scene_created", { + "id": sdk_scene.id, + "video_id": sdk_scene.video_id, + "display_order": sdk_scene.display_order, + }) + return scene @router.get("", response_model=list[Scene]) async def list_by_video(video_id: str): scenes = await _repo.list_scenes(video_id) - return [_scene_to_flat(s) for s in scenes] + project_slug = await _project_slug_for_video(video_id) + return [_scene_to_flat(s, project_slug) for s in scenes] @router.get("/{sid}", response_model=Scene) @@ -71,7 +110,8 @@ async def get(sid: str): sdk_scene = await _repo.get_scene(sid) if not sdk_scene: raise HTTPException(404, "Scene not found") - return _scene_to_flat(sdk_scene) + project_slug = await _project_slug_for_video(sdk_scene.video_id) + return _scene_to_flat(sdk_scene, project_slug) @router.patch("/{sid}", response_model=Scene) @@ -85,13 +125,25 @@ async def update(sid: str, body: SceneUpdate): if not row: raise HTTPException(404, "Scene not found") sdk_scene = _repo._row_to_scene(row) - return _scene_to_flat(sdk_scene) + project_slug = await _project_slug_for_video(sdk_scene.video_id) + scene = _scene_to_flat(sdk_scene, project_slug) + await event_bus.emit("scene_updated", { + "id": sdk_scene.id, + "video_id": 
sdk_scene.video_id, + "display_order": sdk_scene.display_order, + }) + return scene @router.delete("/{sid}") async def delete(sid: str): + scene = await _repo.get_scene(sid) if not await _repo.delete("scene", sid): raise HTTPException(404, "Scene not found") + await event_bus.emit("scene_deleted", { + "id": sid, + "video_id": scene.video_id if scene else None, + }) return {"ok": True} diff --git a/agent/api/tts.py b/agent/api/tts.py index e2d3d06..74cd9f2 100644 --- a/agent/api/tts.py +++ b/agent/api/tts.py @@ -3,6 +3,8 @@ import json import logging import re +import shutil +import subprocess from pathlib import Path from fastapi import APIRouter, HTTPException @@ -13,15 +15,22 @@ from agent.models.tts import ( TTSGenerateRequest, TTSGenerateResponse, + TTSSettingsResponse, + TTSSettingsUpdateRequest, + TTSCatalogResponse, NarrateVideoRequest, NarrateVideoResponse, SceneNarrationResult, VoiceTemplateRequest, + VoiceTemplateImportRequest, VoiceTemplateResponse, VoiceTemplateListItem, ) from agent.services.tts import generate_speech, generate_video_narration +from agent.services.tts_catalog import load_tts_catalog from agent.services.post_process import add_narration +from agent.services.tts_settings import get_tts_settings_public, update_tts_settings +from agent.utils.orientation import normalize_orientation logger = logging.getLogger(__name__) @@ -64,6 +73,35 @@ def _validate_ref_audio(ref_audio: str) -> None: raise HTTPException(400, "ref_audio must be within allowed directories") +@router.get("/tts/settings", response_model=TTSSettingsResponse) +async def get_tts_settings(): + """Get current TTS provider settings (public-safe fields).""" + return TTSSettingsResponse(**get_tts_settings_public()) + + +@router.patch("/tts/settings", response_model=TTSSettingsResponse) +async def patch_tts_settings(body: TTSSettingsUpdateRequest): + """Update TTS provider settings.""" + update_tts_settings( + provider=body.provider, + elevenlabs_api_base=body.elevenlabs_api_base, + 
elevenlabs_api_key=body.elevenlabs_api_key, + clear_elevenlabs_api_key=body.clear_elevenlabs_api_key, + elevenlabs_model_id=body.elevenlabs_model_id, + elevenlabs_default_voice_id=body.elevenlabs_default_voice_id, + elevenlabs_timeout_sec=body.elevenlabs_timeout_sec, + elevenlabs_max_retries=body.elevenlabs_max_retries, + ) + return TTSSettingsResponse(**get_tts_settings_public()) + + +@router.get("/tts/catalog", response_model=TTSCatalogResponse) +async def get_tts_catalog(refresh: bool = False): + """Get provider catalog for UI dropdowns (models + voices).""" + data = await load_tts_catalog(force_refresh=bool(refresh)) + return TTSCatalogResponse(**data) + + @router.post("/tts/generate", response_model=TTSGenerateResponse) async def tts_generate(body: TTSGenerateRequest): """Generate speech for a single text string. Returns path to WAV file.""" @@ -83,10 +121,12 @@ async def tts_generate(body: TTSGenerateRequest): ref_audio=body.ref_audio, ref_text=body.ref_text, speed=body.speed, + voice_id=body.voice_id, + model_id=body.model_id, ) except Exception as e: logger.exception("TTS generation failed") - raise HTTPException(500, "TTS generation failed") + raise HTTPException(500, str(e) or "TTS generation failed") duration = _wav_duration(audio_path) return TTSGenerateResponse(audio_path=audio_path, duration=duration) @@ -128,6 +168,8 @@ async def narrate_video(vid: str, body: NarrateVideoRequest): instruct = body.instruct or project.get("narrator_voice") ref_audio = body.ref_audio or project.get("narrator_ref_audio") ref_text = body.ref_text + voice_id = body.voice_id + model_id = body.model_id if body.template: meta = _load_templates_meta() @@ -136,6 +178,10 @@ async def narrate_video(vid: str, body: NarrateVideoRequest): tmpl = meta[body.template] ref_audio = tmpl["audio_path"] ref_text = tmpl.get("text") + if not voice_id: + voice_id = tmpl.get("voice_id") + if not model_id: + model_id = tmpl.get("model_id") logger.info("Using voice template '%s' as reference", 
body.template) elif ref_audio and not ref_text: # Try to auto-resolve ref_text from template metadata @@ -143,6 +189,10 @@ async def narrate_video(vid: str, body: NarrateVideoRequest): for tmpl in meta.values(): if tmpl["audio_path"] == ref_audio: ref_text = tmpl.get("text") + if not voice_id: + voice_id = tmpl.get("voice_id") + if not model_id: + model_id = tmpl.get("model_id") logger.info("Auto-resolved ref_text from template '%s'", tmpl["name"]) break @@ -161,9 +211,11 @@ async def narrate_video(vid: str, body: NarrateVideoRequest): ref_audio=ref_audio, ref_text=ref_text, speed=body.speed, + voice_id=voice_id, + model_id=model_id, ) - orientation = body.orientation.upper() + orientation = normalize_orientation(body.orientation) scene_results = [] for r in raw_results: @@ -230,10 +282,12 @@ async def create_voice_template(body: VoiceTemplateRequest): output_path=wav_path, instruct=body.instruct, speed=body.speed, + voice_id=body.voice_id, + model_id=body.model_id, ) except Exception as e: logger.exception("Voice template generation failed") - raise HTTPException(500, "Voice template generation failed") + raise HTTPException(500, str(e) or "Voice template generation failed") duration = _wav_duration(wav_path) @@ -244,13 +298,84 @@ async def create_voice_template(body: VoiceTemplateRequest): "audio_path": wav_path, "text": body.text, "instruct": body.instruct, + "voice_id": body.voice_id or "", + "model_id": body.model_id or "", "duration": duration, } _save_templates_meta(meta) return VoiceTemplateResponse( name=body.name, audio_path=wav_path, text=body.text, - instruct=body.instruct, duration=duration, + instruct=body.instruct, voice_id=body.voice_id, model_id=body.model_id, duration=duration, + ) + + +@router.post("/tts/templates/import", response_model=VoiceTemplateResponse) +async def import_voice_template(body: VoiceTemplateImportRequest): + """Import an existing local audio file as a template (fk:import-voice parity).""" + _validate_template_name(body.name) 
+ + src = Path(body.audio_path).expanduser().resolve() + if not src.exists(): + raise HTTPException(404, f"Audio file not found: {body.audio_path}") + if not src.is_file(): + raise HTTPException(400, "audio_path must be a file") + + TEMPLATES_DIR.mkdir(parents=True, exist_ok=True) + target = (TEMPLATES_DIR / f"{body.name}.wav").resolve() + + # Import strategy: + # - copy WAV directly when allowed + # - otherwise transcode to 24k WAV for OmniVoice compatibility + if src.suffix.lower() == ".wav" and body.copy_audio: + try: + shutil.copyfile(src, target) + except Exception as e: + logger.exception("Failed to copy template audio") + raise HTTPException(500, f"Failed to import template audio: {e}") + else: + cmd = [ + "ffmpeg", + "-y", + "-i", + str(src), + "-ar", + "24000", + "-ac", + "1", + "-c:a", + "pcm_s16le", + str(target), + ] + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=120) + except Exception as e: + raise HTTPException(500, f"ffmpeg failed while importing voice template: {e}") + if result.returncode != 0: + logger.error("ffmpeg import failed: %s", (result.stderr or "")[-400:]) + raise HTTPException(500, "Failed to convert audio to WAV for template import") + + duration = _wav_duration(str(target)) + meta = _load_templates_meta() + meta[body.name] = { + "name": body.name, + "audio_path": str(target), + "text": body.text, + "instruct": body.instruct, + "voice_id": body.voice_id or "", + "model_id": body.model_id or "", + "duration": duration, + } + _save_templates_meta(meta) + + return VoiceTemplateResponse( + name=body.name, + audio_path=str(target), + text=body.text, + instruct=body.instruct, + voice_id=body.voice_id, + model_id=body.model_id, + duration=duration, ) @@ -259,7 +384,13 @@ async def list_voice_templates(): """List all saved voice templates.""" meta = _load_templates_meta() return [ - VoiceTemplateListItem(name=v["name"], audio_path=v["audio_path"], duration=v.get("duration")) + VoiceTemplateListItem( + 
name=v["name"], + audio_path=v["audio_path"], + voice_id=v.get("voice_id"), + model_id=v.get("model_id"), + duration=v.get("duration"), + ) for v in meta.values() ] diff --git a/agent/api/videos.py b/agent/api/videos.py index 1a57e5c..9ec71cd 100644 --- a/agent/api/videos.py +++ b/agent/api/videos.py @@ -1,11 +1,32 @@ +import asyncio +import json +import logging +import shutil +import subprocess +from datetime import datetime, timezone +from pathlib import Path +from typing import Any +from urllib.parse import parse_qs, quote, unquote, urlparse + +import aiohttp from fastapi import APIRouter, HTTPException +from pydantic import BaseModel + +from agent.config import OUTPUT_DIR, SHARED_OUTPUT_DIR from agent.models.video import Video, VideoCreate, VideoUpdate +from agent.models.enums import ChainType, SceneSource from agent.sdk.persistence.sqlite_repository import SQLiteRepository -from dataclasses import asdict +from agent.services.event_bus import event_bus +from agent.services.flow_client import get_flow_client +from agent.services.post_process import add_music, add_narration, merge_videos, trim_video +from agent.utils.paths import resolve_4k_file, scene_filename, scene_tts_path +from agent.utils.slugify import slugify +from agent.utils.orientation import normalize_orientation router = APIRouter(prefix="/videos", tags=["videos"]) _repo = SQLiteRepository() +logger = logging.getLogger(__name__) def _video_to_flat(sdk_video) -> dict: @@ -31,9 +52,511 @@ def _video_to_flat(sdk_video) -> dict: } +class ConcatRequest(BaseModel): + project_id: str | None = None + orientation: str | None = None + with_narrator: bool = True + with_music: bool = False + force_4k: bool = False + fit_narrator: bool = False + narrator_buffer: float = 0.5 + export_root_dir: str | None = None + export_assets: bool = True + + +class ConcatResponse(BaseModel): + output_path: str + scenes: int + orientation: str + resolution: str + with_narrator: bool + with_music: bool + fit_narrator: bool = False 
+ narrator_buffer: float = 0.5 + export_dir: str | None = None + exported_images: int = 0 + exported_videos: int = 0 + failed_assets: int = 0 + + +class DownloadAssetsRequest(BaseModel): + project_id: str | None = None + orientation: str | None = None + rebind_scene_urls: bool = True + + +class DownloadAssetsResponse(BaseModel): + ok: bool + video_id: str + orientation: str + download_dir: str + images_downloaded: int + videos_downloaded: int + scene_url_rebound: int + failed: list[str] = [] + + +class ScriptScenePayload(BaseModel): + display_order: int | None = None + prompt: str | None = None + image_prompt: str | None = None + video_prompt: str | None = None + narrator_text: str | None = None + character_names: list[str] | str | None = None + transition_prompt: str | None = None + chain_type: ChainType = "ROOT" + source: SceneSource = "root" + + +class ScriptVideoMetaPayload(BaseModel): + title: str | None = None + description: str | None = None + orientation: str | None = None + + +class ScriptImportRequest(BaseModel): + format_version: int | None = None + title: str | None = None + description: str | None = None + orientation: str | None = None + video: ScriptVideoMetaPayload | None = None + scenes: list[ScriptScenePayload] + replace_existing: bool = True + clear_requests: bool = True + + +class ScriptImportResponse(BaseModel): + ok: bool + video_id: str + scenes_total: int + deleted_scenes: int + deleted_requests: int + orientation: str + title: str + + +def _probe_resolution(path: Path) -> tuple[int, int] | None: + cmd = [ + "ffprobe", + "-v", "error", + "-select_streams", "v:0", + "-show_entries", "stream=width,height", + "-of", "csv=p=0:s=x", + str(path), + ] + result = subprocess.run(cmd, capture_output=True, text=True, timeout=20) + if result.returncode != 0: + return None + text = result.stdout.strip() + if "x" not in text: + return None + try: + w, h = text.split("x", 1) + return int(w), int(h) + except ValueError: + return None + + +def 
_probe_duration(path: Path) -> float | None: + cmd = [ + "ffprobe", + "-v", "error", + "-show_entries", "format=duration", + "-of", "csv=p=0", + str(path), + ] + result = subprocess.run(cmd, capture_output=True, text=True, timeout=20) + if result.returncode != 0: + return None + try: + return float(result.stdout.strip()) + except ValueError: + return None + + +def _normalize_clip(input_path: Path, output_path: Path, width: int, height: int) -> bool: + output_path.parent.mkdir(parents=True, exist_ok=True) + cmd = [ + "ffmpeg", "-y", + "-i", str(input_path), + "-map", "0:v:0", + "-map", "0:a?", + "-c:v", "libx264", + "-preset", "fast", + "-crf", "18", + "-vf", f"scale={width}:{height}:force_original_aspect_ratio=decrease,pad={width}:{height}:(ow-iw)/2:(oh-ih)/2", + "-r", "24", + "-pix_fmt", "yuv420p", + "-c:a", "aac", + "-b:a", "192k", + "-movflags", "+faststart", + str(output_path), + ] + result = subprocess.run(cmd, capture_output=True, text=True, timeout=300) + if result.returncode != 0: + logger.error("Normalize failed for %s: %s", input_path, result.stderr[-300:]) + return False + return True + + +def _extract_first_url(payload) -> str | None: + def _is_direct_media_url(url: str) -> bool: + low = (url or "").lower() + if not low.startswith("http"): + return False + if "media.getmediaurlredirect" in low: + return False + if low.startswith("https://flow-content.google/"): + return True + if low.startswith("https://storage.googleapis.com/"): + return True + if "googleusercontent.com/" in low: + return True + return False + + if isinstance(payload, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = payload.get(key) + if isinstance(value, str) and _is_direct_media_url(value): + return value + for value in payload.values(): + found = _extract_first_url(value) + if found: + return found + return None + if isinstance(payload, list): + for item in payload: + found = _extract_first_url(item) + if found: + return found + return None + 
return None + + +async def _download_file(url: str, output_path: Path) -> bool: + output_path.parent.mkdir(parents=True, exist_ok=True) + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=180) + async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(url) as resp: + if resp.status != 200: + return False + output_path.write_bytes(await resp.read()) + return True + except Exception: + return False + + +async def _refresh_media_url(media_id: str | None) -> str | None: + if not media_id: + return None + client = get_flow_client() + if not client.connected: + return None + result = await client.get_media(media_id) + if result.get("error"): + return None + return _extract_first_url(result.get("data", result)) + + +def _find_music_file(project_slug: str) -> Path | None: + project_music_dir = OUTPUT_DIR / project_slug / "music" + candidates = [] + for root in (project_music_dir, SHARED_OUTPUT_DIR / "music"): + if not root.exists(): + continue + for pattern in ("*.wav", "*.mp3", "*.m4a"): + candidates.extend(root.glob(pattern)) + if not candidates: + return None + return max(candidates, key=lambda p: p.stat().st_mtime) + + +async def _resolve_scene_source(scene, orientation: str, project_slug: str) -> Path: + local_4k = resolve_4k_file(project_slug, scene.display_order, scene.id) + if local_4k and local_4k.exists(): + return local_4k + + slot = scene.vertical if orientation == "VERTICAL" else scene.horizontal + for candidate in (slot.upscale.url, slot.video.url): + if not candidate or candidate.startswith("http"): + continue + p = Path(candidate) + if p.exists(): + return p + + remote_url = slot.upscale.url or slot.video.url + media_id = slot.upscale.media_id or slot.video.media_id + if not remote_url: + raise RuntimeError(f"Scene {scene.display_order + 1} has no generated {orientation.lower()} video") + + target = OUTPUT_DIR / project_slug / "4k" / 
f"scene_{scene.display_order:03d}_{scene.id}.mp4" + ok = await _download_file(remote_url, target) + if not ok: + refreshed = await _refresh_media_url(media_id) + if refreshed: + ok = await _download_file(refreshed, target) + if not ok or not target.exists(): + raise RuntimeError(f"Failed downloading scene {scene.display_order + 1} video source") + return target + + +def _suffix_from_uri(uri: str | None, default_suffix: str) -> str: + if not uri: + return default_suffix + try: + parsed = urlparse(uri) + suffix = Path(parsed.path).suffix.lower() + except Exception: + suffix = "" + if 1 <= len(suffix) <= 8: + return suffix + return default_suffix + + +def _parse_character_names(raw: list[str] | str | None) -> list[str]: + if isinstance(raw, list): + return [str(item).strip() for item in raw if str(item).strip()] + if isinstance(raw, str): + text = raw.strip() + if not text: + return [] + # Try JSON first to support edited export payloads. + if text.startswith("["): + try: + parsed = json.loads(text) + except Exception: + parsed = None + if isinstance(parsed, list): + return [str(item).strip() for item in parsed if str(item).strip()] + return [part.strip() for part in text.split(",") if part.strip()] + return [] + + +def _coalesce_script_text(*values: str | None) -> str: + for value in values: + if value and value.strip(): + return value.strip() + return "" + + +def _build_local_media_proxy_url(path: Path) -> str: + return f"http://127.0.0.1:8100/api/flow/local-media?path={quote(str(path), safe='')}" + + +def _extract_local_media_path(raw_url: str | None) -> Path | None: + if not isinstance(raw_url, str): + return None + text = raw_url.strip() + if not text: + return None + + if text.startswith("http://") or text.startswith("https://"): + try: + parsed = urlparse(text) + host = (parsed.hostname or "").lower() + if host not in ("127.0.0.1", "localhost"): + return None + if parsed.path.rstrip("/") != "/api/flow/local-media": + return None + raw_path = 
(parse_qs(parsed.query).get("path") or [None])[0] + if not isinstance(raw_path, str) or not raw_path.strip(): + return None + candidate = Path(unquote(raw_path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + if text.startswith("file://"): + try: + parsed = urlparse(text) + candidate = Path(unquote(parsed.path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + candidate = Path(text).expanduser() + return candidate if candidate.is_absolute() else None + + +async def _copy_file(src: Path, dst: Path) -> bool: + try: + try: + if src.resolve() == dst.resolve(): + return True + except Exception: + pass + dst.parent.mkdir(parents=True, exist_ok=True) + await asyncio.to_thread(shutil.copy2, src, dst) + return True + except Exception: + return False + + +async def _materialize_media_to_local( + *, + source_url: str | None, + media_id: str | None, + default_suffix: str, + target_base_path: Path, +) -> Path | None: + local = _extract_local_media_path(source_url) + if local and local.exists() and local.is_file(): + target = target_base_path.with_suffix(local.suffix or default_suffix) + if await _copy_file(local, target): + return target + + if source_url and source_url.startswith("http"): + suffix = _suffix_from_uri(source_url, default_suffix) + target = target_base_path.with_suffix(suffix) + if await _download_file(source_url, target): + return target + + refreshed = await _refresh_media_url(media_id) + if refreshed: + suffix = _suffix_from_uri(refreshed, default_suffix) + target = target_base_path.with_suffix(suffix) + if await _download_file(refreshed, target): + return target + + return None + + +def _scene_local_sources(scene, orientation: str): + primary_vertical = normalize_orientation(orientation) == "VERTICAL" + ordered = ( + [("vertical", scene.vertical), ("horizontal", scene.horizontal)] + if primary_vertical + else [("horizontal", scene.horizontal), 
("vertical", scene.vertical)] + ) + + image_source = None + video_source = None + for prefix, slot in ordered: + if not image_source and (slot.image.url or slot.image.media_id): + image_source = { + "field": f"{prefix}_image_url", + "status_field": f"{prefix}_image_status", + "url": slot.image.url, + "media_id": slot.image.media_id, + "default_suffix": ".png", + "kind": "image", + "axis": prefix, + } + if not video_source: + if slot.upscale.url or slot.upscale.media_id: + video_source = { + "field": f"{prefix}_upscale_url", + "status_field": f"{prefix}_upscale_status", + "url": slot.upscale.url, + "media_id": slot.upscale.media_id, + "default_suffix": ".mp4", + "kind": "upscale", + "axis": prefix, + } + elif slot.video.url or slot.video.media_id: + video_source = { + "field": f"{prefix}_video_url", + "status_field": f"{prefix}_video_status", + "url": slot.video.url, + "media_id": slot.video.media_id, + "default_suffix": ".mp4", + "kind": "video", + "axis": prefix, + } + if image_source and video_source: + break + + return image_source, video_source + + +def _canonical_scene_target_base_path(project_slug: str, scene, source: dict) -> Path: + kind = str(source.get("kind") or "image") + axis = str(source.get("axis") or "horizontal") + scene_idx = int(scene.display_order) + 1 + ext = "png" if kind == "image" else "mp4" + canonical_name = scene_filename(scene_idx, scene.id, ext=ext) + subdir = "images" if kind == "image" else ("upscale" if kind == "upscale" else "videos") + return OUTPUT_DIR / project_slug / subdir / axis / canonical_name + + +async def _export_scene_assets( + scenes: list, + orientation: str, + project_slug: str, + export_dir: Path, +) -> tuple[int, int, int]: + images_dir = export_dir / "images" + videos_dir = export_dir / "videos" + images_dir.mkdir(parents=True, exist_ok=True) + videos_dir.mkdir(parents=True, exist_ok=True) + + exported_images = 0 + exported_videos = 0 + failed_assets = 0 + + for scene in scenes: + scene_idx = 
int(scene.display_order) + 1 + file_base = f"scene_{scene_idx:03d}" + slot = scene.vertical if orientation == "VERTICAL" else scene.horizontal + + # Export image + image_local_ok = False + image_url = slot.image.url + image_media_id = slot.image.media_id + image_local = None + if image_url and not image_url.startswith("http"): + p = Path(image_url) + if p.exists(): + image_local = p + + image_suffix = _suffix_from_uri(image_url, ".png") + image_target = images_dir / f"{file_base}{image_suffix}" + if image_local and await _copy_file(image_local, image_target): + image_local_ok = True + elif image_url and image_url.startswith("http"): + image_local_ok = await _download_file(image_url, image_target) + if not image_local_ok and image_media_id: + refreshed = await _refresh_media_url(image_media_id) + if refreshed: + refreshed_target = images_dir / f"{file_base}{_suffix_from_uri(refreshed, image_suffix)}" + image_local_ok = await _download_file(refreshed, refreshed_target) + if image_local_ok: + exported_images += 1 + else: + failed_assets += 1 + + # Export video source in selected orientation (prefer local resolved path) + video_local_ok = False + try: + scene_source = await _resolve_scene_source(scene, orientation, project_slug) + video_target = videos_dir / f"{file_base}.mp4" + video_local_ok = await _copy_file(scene_source, video_target) + except Exception: + video_local_ok = False + if video_local_ok: + exported_videos += 1 + else: + failed_assets += 1 + + return exported_images, exported_videos, failed_assets + + @router.post("", response_model=Video) async def create(body: VideoCreate): - sdk_video = await _repo.create_video(**body.model_dump(exclude_none=True)) + create_data = body.model_dump(exclude_none=True) + if create_data.get("orientation"): + create_data["orientation"] = normalize_orientation(create_data["orientation"]) + if "orientation" not in create_data or not create_data["orientation"]: + project = await _repo.get_project(body.project_id) + 
create_data["orientation"] = normalize_orientation(project.orientation if project else "VERTICAL") + sdk_video = await _repo.create_video(**create_data) + await event_bus.emit("video_created", { + "id": sdk_video.id, + "project_id": sdk_video.project_id, + "orientation": sdk_video.orientation, + }) return _video_to_flat(sdk_video) @@ -53,15 +576,505 @@ async def get(vid: str): @router.patch("/{vid}", response_model=Video) async def update(vid: str, body: VideoUpdate): - row = await _repo.update("video", vid, **body.model_dump(exclude_unset=True)) + update_data = body.model_dump(exclude_unset=True) + if update_data.get("orientation"): + update_data["orientation"] = normalize_orientation(update_data["orientation"]) + row = await _repo.update("video", vid, **update_data) if not row: raise HTTPException(404, "Video not found") sdk_video = _repo._row_to_video(row) + await event_bus.emit("video_updated", { + "id": sdk_video.id, + "project_id": sdk_video.project_id, + "orientation": sdk_video.orientation, + }) return _video_to_flat(sdk_video) @router.delete("/{vid}") async def delete(vid: str): + video = await _repo.get_video(vid) if not await _repo.delete("video", vid): raise HTTPException(404, "Video not found") + await event_bus.emit("video_deleted", {"id": vid, "project_id": video.project_id if video else None}) return {"ok": True} + + +@router.post("/{vid}/download-assets", response_model=DownloadAssetsResponse) +async def download_assets(vid: str, body: DownloadAssetsRequest): + sdk_video = await _repo.get_video(vid) + if not sdk_video: + raise HTTPException(404, "Video not found") + project = await _repo.get_project(sdk_video.project_id) + if not project: + raise HTTPException(404, "Project not found") + + if body.project_id and body.project_id != sdk_video.project_id: + raise HTTPException(400, "project_id does not match this video") + + orientation = normalize_orientation( + body.orientation or sdk_video.orientation or project.orientation or "VERTICAL" + ) + 
scenes = sorted(await _repo.list_scenes(vid), key=lambda s: s.display_order) + if not scenes: + raise HTTPException(400, "No scenes found") + + project_slug = slugify(project.name) or f"project_{project.id[:8]}" + video_slug = slugify(sdk_video.title) or f"video_{sdk_video.id[:8]}" + video_order = int(sdk_video.display_order or 0) + 1 + legacy_download_dir = OUTPUT_DIR / project_slug / "downloads" / f"video_{video_order:03d}_{video_slug}" + canonical_root_dir = OUTPUT_DIR / project_slug + + images_downloaded = 0 + videos_downloaded = 0 + scene_url_rebound = 0 + failed: list[str] = [] + + for scene in scenes: + scene_idx = int(scene.display_order) + 1 + image_source, video_source = _scene_local_sources(scene, orientation) + + if image_source: + target = await _materialize_media_to_local( + source_url=image_source["url"], + media_id=image_source["media_id"], + default_suffix=image_source["default_suffix"], + target_base_path=_canonical_scene_target_base_path(project_slug, scene, image_source), + ) + if target: + images_downloaded += 1 + if body.rebind_scene_urls: + proxy_url = _build_local_media_proxy_url(target) + await _repo.update( + "scene", + scene.id, + **{ + image_source["field"]: proxy_url, + image_source["status_field"]: "COMPLETED", + }, + ) + scene_url_rebound += 1 + else: + failed.append(f"scene_{scene_idx:03d}: image") + + if video_source: + target = await _materialize_media_to_local( + source_url=video_source["url"], + media_id=video_source["media_id"], + default_suffix=video_source["default_suffix"], + target_base_path=_canonical_scene_target_base_path(project_slug, scene, video_source), + ) + if target: + videos_downloaded += 1 + if body.rebind_scene_urls: + proxy_url = _build_local_media_proxy_url(target) + await _repo.update( + "scene", + scene.id, + **{ + video_source["field"]: proxy_url, + video_source["status_field"]: "COMPLETED", + }, + ) + scene_url_rebound += 1 + else: + failed.append(f"scene_{scene_idx:03d}: video") + + await event_bus.emit( 
+ "scene_updated", + { + "id": "download-assets", + "video_id": vid, + "display_order": 0, + }, + ) + await event_bus.emit( + "video_assets_downloaded", + { + "id": vid, + "project_id": sdk_video.project_id, + "orientation": orientation, + "download_dir": str(canonical_root_dir), + "images_downloaded": images_downloaded, + "videos_downloaded": videos_downloaded, + }, + ) + + # Best-effort cleanup: old builds wrote duplicated local copies under output//downloads/. + # We now persist canonical files under output//{images,videos,upscale}/... + try: + if legacy_download_dir.exists() and legacy_download_dir.is_dir(): + shutil.rmtree(legacy_download_dir) + except Exception: + # Non-fatal; local assets are already rebound to canonical paths above. + pass + + return DownloadAssetsResponse( + ok=True, + video_id=vid, + orientation=orientation, + download_dir=str(canonical_root_dir), + images_downloaded=images_downloaded, + videos_downloaded=videos_downloaded, + scene_url_rebound=scene_url_rebound, + failed=failed, + ) + + +@router.get("/{vid}/script-export") +async def export_script(vid: str): + sdk_video = await _repo.get_video(vid) + if not sdk_video: + raise HTTPException(404, "Video not found") + project = await _repo.get_project(sdk_video.project_id) + if not project: + raise HTTPException(404, "Project not found") + + scenes = sorted(await _repo.list_scenes(vid), key=lambda s: s.display_order) + return { + "format_version": 1, + "exported_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), + "project": { + "id": project.id, + "name": project.name, + "orientation": project.orientation, + "language": project.language, + "material": project.material, + }, + "video": { + "id": sdk_video.id, + "title": sdk_video.title, + "description": sdk_video.description, + "orientation": sdk_video.orientation or project.orientation, + }, + "scenes": [ + { + "display_order": scene.display_order, + "prompt": scene.prompt, + "image_prompt": scene.image_prompt, + 
"video_prompt": scene.video_prompt, + "narrator_text": scene.narrator_text, + "character_names": scene.character_names or [], + "transition_prompt": scene.transition_prompt, + "chain_type": scene.chain_type, + "source": scene.source, + } + for scene in scenes + ], + } + + +@router.post("/{vid}/script-import", response_model=ScriptImportResponse) +async def import_script(vid: str, body: ScriptImportRequest): + if not body.scenes: + raise HTTPException(400, "Script must include at least one scene") + if not body.replace_existing: + raise HTTPException(400, "Only replace_existing=true is supported") + + sdk_video = await _repo.get_video(vid) + if not sdk_video: + raise HTTPException(404, "Video not found") + project = await _repo.get_project(sdk_video.project_id) + if not project: + raise HTTPException(404, "Project not found") + + normalized: list[dict[str, Any]] = [] + for i, scene in enumerate(body.scenes): + prompt = _coalesce_script_text( + scene.prompt, + scene.image_prompt, + scene.video_prompt, + scene.narrator_text, + ) + if not prompt: + raise HTTPException(400, f"Scene #{i + 1} is missing prompt/image_prompt/video_prompt/narrator_text") + normalized.append( + { + "order": scene.display_order if scene.display_order is not None else i, + "prompt": prompt, + "image_prompt": _coalesce_script_text(scene.image_prompt) or None, + "video_prompt": _coalesce_script_text(scene.video_prompt) or None, + "narrator_text": _coalesce_script_text(scene.narrator_text) or None, + "character_names": _parse_character_names(scene.character_names) or None, + "transition_prompt": _coalesce_script_text(scene.transition_prompt) or None, + "chain_type": scene.chain_type or "ROOT", + "source": scene.source or "root", + } + ) + + # Stable sort by requested order then original index, then compact to 0..N-1. 
+ normalized = [item for _, item in sorted(enumerate(normalized), key=lambda pair: (pair[1]["order"], pair[0]))] + for idx, item in enumerate(normalized): + item["display_order"] = idx + + existing_scenes = await _repo.list_scenes(vid) + deleted_scenes = 0 + for scene in existing_scenes: + ok = await _repo.delete("scene", scene.id) + if ok: + deleted_scenes += 1 + + deleted_requests = 0 + if body.clear_requests: + req_rows = await _repo.list("request", video_id=vid) + for row in req_rows: + rid = row.get("id") + if rid and await _repo.delete("request", rid): + deleted_requests += 1 + + for scene in normalized: + await _repo.create_scene( + video_id=vid, + display_order=scene["display_order"], + prompt=scene["prompt"], + image_prompt=scene["image_prompt"], + video_prompt=scene["video_prompt"], + transition_prompt=scene["transition_prompt"], + character_names=scene["character_names"], + chain_type=scene["chain_type"], + source=scene["source"], + narrator_text=scene["narrator_text"], + ) + + # Reset derived/exported media for this video after script replacement. 
+ update_data: dict[str, Any] = { + "status": "DRAFT", + "vertical_url": None, + "horizontal_url": None, + "thumbnail_url": None, + "duration": None, + "resolution": None, + } + target_title = _coalesce_script_text(body.title, body.video.title if body.video else None) + target_desc = _coalesce_script_text(body.description, body.video.description if body.video else None) + target_ori_raw = _coalesce_script_text(body.orientation, body.video.orientation if body.video else None) + if target_title: + update_data["title"] = target_title + if target_desc: + update_data["description"] = target_desc + if target_ori_raw: + update_data["orientation"] = normalize_orientation( + target_ori_raw, + default=sdk_video.orientation or project.orientation or "VERTICAL", + ) + row = await _repo.update("video", vid, **update_data) + if not row: + raise HTTPException(404, "Video not found") + updated_video = _repo._row_to_video(row) + + await event_bus.emit( + "video_updated", + { + "id": updated_video.id, + "project_id": updated_video.project_id, + "orientation": updated_video.orientation, + }, + ) + await event_bus.emit( + "scene_updated", + { + "id": "script-import", + "video_id": vid, + "display_order": 0, + }, + ) + + return ScriptImportResponse( + ok=True, + video_id=vid, + scenes_total=len(normalized), + deleted_scenes=deleted_scenes, + deleted_requests=deleted_requests, + orientation=updated_video.orientation or project.orientation or "VERTICAL", + title=updated_video.title, + ) + + +@router.post("/{vid}/concat", response_model=ConcatResponse) +async def concat_video(vid: str, body: ConcatRequest): + sdk_video = await _repo.get_video(vid) + if not sdk_video: + raise HTTPException(404, "Video not found") + + project = await _repo.get_project(sdk_video.project_id) + if not project: + raise HTTPException(404, "Project not found") + + if body.project_id and body.project_id != sdk_video.project_id: + raise HTTPException(400, "project_id does not match this video") + + orientation = 
normalize_orientation(body.orientation or sdk_video.orientation or project.orientation or "VERTICAL") + if orientation not in ("VERTICAL", "HORIZONTAL"): + raise HTTPException(400, "orientation must be VERTICAL or HORIZONTAL") + + scenes = sorted(await _repo.list_scenes(vid), key=lambda s: s.display_order) + if not scenes: + raise HTTPException(400, "No scenes found") + + project_slug = slugify(project.name) or f"project_{project.id[:8]}" + out_dir = OUTPUT_DIR / project_slug + (out_dir / "4k").mkdir(parents=True, exist_ok=True) + (out_dir / "narrated").mkdir(parents=True, exist_ok=True) + (out_dir / "trimmed").mkdir(parents=True, exist_ok=True) + (out_dir / "norm").mkdir(parents=True, exist_ok=True) + + # Resolve all scene sources first so we fail early if any scene is missing. + source_paths: list[Path] = [] + for scene in scenes: + try: + source_paths.append(await _resolve_scene_source(scene, orientation, project_slug)) + except RuntimeError as e: + raise HTTPException(400, str(e)) from e + + # Choose output resolution. 
+ if body.force_4k: + width, height = ((2160, 3840) if orientation == "VERTICAL" else (3840, 2160)) + else: + first_res = _probe_resolution(source_paths[0]) + if not first_res: + width, height = ((1080, 1920) if orientation == "VERTICAL" else (1920, 1080)) + else: + width, height = first_res + + normalized_paths: list[str] = [] + narrator_buffer = max(0.0, body.narrator_buffer) + for scene, source in zip(scenes, source_paths): + processing_source = source + tts_path = scene_tts_path(project_slug, scene.display_order, scene.id) + if not tts_path.exists(): + legacy_tts = out_dir / "tts" / f"{scene.id}.wav" + if legacy_tts.exists(): + tts_path = legacy_tts + + if body.fit_narrator and tts_path.exists(): + tts_duration = _probe_duration(tts_path) + video_duration = _probe_duration(processing_source) + if tts_duration and video_duration: + cut_duration = min(video_duration, tts_duration + narrator_buffer) + if cut_duration > 0.05: + trimmed = out_dir / "trimmed" / f"scene_{scene.display_order:03d}_{scene.id}.mp4" + ok_trim = await asyncio.to_thread( + trim_video, + str(processing_source), + str(trimmed), + 0.0, + cut_duration, + ) + if not ok_trim: + raise HTTPException(500, f"Failed to trim scene {scene.display_order + 1} to narrator duration") + processing_source = trimmed + + if body.with_narrator: + if tts_path.exists(): + narrated = out_dir / "narrated" / f"scene_{scene.display_order:03d}_{scene.id}.mp4" + ok_mix = await asyncio.to_thread( + add_narration, + str(processing_source), + str(tts_path), + str(narrated), + replace_original=True, + ) + if not ok_mix: + raise HTTPException(500, f"Failed to apply narration track for scene {scene.display_order + 1}") + processing_source = narrated + + normalized = out_dir / "norm" / f"scene_{scene.display_order:03d}_{scene.id}.mp4" + ok_norm = await asyncio.to_thread(_normalize_clip, processing_source, normalized, width, height) + if not ok_norm: + raise HTTPException(500, f"Failed to normalize scene {scene.display_order + 
1}") + normalized_paths.append(str(normalized)) + + merged_output = out_dir / f"{project_slug}_final_{orientation.lower()}.mp4" + ok_merge = await asyncio.to_thread(merge_videos, normalized_paths, str(merged_output)) + if not ok_merge: + raise HTTPException(500, "Failed to concatenate scene videos") + + final_output = merged_output + if body.with_music: + music_path = _find_music_file(project_slug) + if not music_path: + raise HTTPException(400, "with_music=true but no music file found (expected output//music or output/_shared/music)") + mixed_output = out_dir / f"{project_slug}_final_{orientation.lower()}_music.mp4" + ok_music = await asyncio.to_thread( + add_music, + str(merged_output), + str(music_path), + str(mixed_output), + ) + if not ok_music: + raise HTTPException(500, "Failed to add background music") + final_output = mixed_output + + export_dir_str: str | None = None + exported_images = 0 + exported_videos = 0 + failed_assets = 0 + if body.export_assets and body.export_root_dir: + try: + export_root = Path(body.export_root_dir).expanduser().resolve() + video_slug = slugify(sdk_video.title) or "video" + video_order = int(sdk_video.display_order or 0) + 1 + project_export_dir = export_root / project_slug / f"video_{video_order:03d}_{video_slug}" + final_dir = project_export_dir / "final" + final_dir.mkdir(parents=True, exist_ok=True) + final_target = final_dir / f"final_{orientation.lower()}.mp4" + if not await _copy_file(final_output, final_target): + raise RuntimeError("Failed copying final video to export folder") + + exported_images, exported_videos, failed_assets = await _export_scene_assets( + scenes=scenes, + orientation=orientation, + project_slug=project_slug, + export_dir=project_export_dir, + ) + export_dir_str = str(project_export_dir) + except Exception as e: + raise HTTPException(400, f"Export assets failed: {e}") from e + + update_fields = { + "orientation": orientation, + "resolution": f"{width}x{height}", + } + if orientation == "VERTICAL": 
+ update_fields["vertical_url"] = str(final_output) + else: + update_fields["horizontal_url"] = str(final_output) + await _repo.update("video", vid, **update_fields) + + await event_bus.emit("video_concatenated", { + "id": vid, + "project_id": sdk_video.project_id, + "orientation": orientation, + "output_path": str(final_output), + "export_dir": export_dir_str, + }) + + return ConcatResponse( + output_path=str(final_output), + scenes=len(scenes), + orientation=orientation, + resolution=f"{width}x{height}", + with_narrator=body.with_narrator, + with_music=body.with_music, + fit_narrator=body.fit_narrator, + narrator_buffer=narrator_buffer, + export_dir=export_dir_str, + exported_images=exported_images, + exported_videos=exported_videos, + failed_assets=failed_assets, + ) + + +@router.post("/{vid}/recompact") +async def recompact_scenes(vid: str): + """Re-number scene display_order sequentially (0,1,2,...) to fix gaps.""" + scenes = await _repo.list_scenes(vid) + if not scenes: + raise HTTPException(404, "No scenes found for this video") + sorted_scenes = sorted(scenes, key=lambda s: s.display_order) + updated = 0 + for i, scene in enumerate(sorted_scenes): + if scene.display_order != i: + await _repo.update("scene", scene.id, display_order=i) + updated += 1 + return {"total": len(sorted_scenes), "reordered": updated} diff --git a/agent/api/workflows.py b/agent/api/workflows.py new file mode 100644 index 0000000..de74f57 --- /dev/null +++ b/agent/api/workflows.py @@ -0,0 +1,1647 @@ +"""Workflow helper endpoints for CLI parity features.""" +from __future__ import annotations + +import json +import logging +import re +import shutil +import subprocess +import tempfile +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Literal, Optional +from urllib.parse import quote + +import aiohttp +from fastapi import APIRouter, HTTPException, Query +from pydantic import BaseModel, Field + +from agent.config import BASE_DIR, OUTPUT_DIR +from 
agent.db import crud +from agent.sdk.persistence.sqlite_repository import SQLiteRepository +from agent.utils.orientation import normalize_orientation +from agent.utils.paths import resolve_4k_file, scene_tts_path +from agent.utils.slugify import slugify +from agent.services.video_reviewer import review_video +from agent.services.local_upscaler import local_upscale_health as get_local_upscale_health + +router = APIRouter(prefix="/workflows", tags=["workflows"]) +logger = logging.getLogger(__name__) +_repo = SQLiteRepository() + +_DATE_PATTERNS = [ + re.compile(r"\b\d{1,2}[/-]\d{1,2}(?:[/-]\d{2,4})?\b"), + re.compile(r"\b\d{4}[/-]\d{1,2}[/-]\d{1,2}\b"), + re.compile( + r"\b\d{1,2}\s+(?:th[áa]ng|tháng|month|apr|march|january|february|june|july|august|september|october|november|december)\s+\d{2,4}\b", + re.IGNORECASE, + ), +] +_COST_PATTERN = re.compile( + r"(\$\s?\d[\d,\.]*|\d[\d,\.]*\s*(?:usd|vnd|triệu|tỷ|million|billion))", + re.IGNORECASE, +) +_STAT_PATTERN = re.compile( + r"(\d[\d,\.]*\s*(?:%|km|m|mi|người|people|casualties|lính|scene|fps|hours|minutes|s|sec))", + re.IGNORECASE, +) +_NAME_PATTERN = re.compile(r"\b([A-ZÀ-Ý][\wÀ-ỹ-]*(?:\s+[A-ZÀ-Ý][\wÀ-ỹ-]*){1,3})\b") +_VI_MARKER = re.compile(r"[ăâđêôơưĂÂĐÊÔƠƯáàảãạắằẳẵặấầẩẫậéèẻẽẹếềểễệíìỉĩịóòỏõọốồổỗộớờởỡợúùủũụứừửữựýỳỷỹỵ]") +_ES_MARKER = re.compile(r"[ñáéíóúüÑÁÉÍÓÚÜ]") + + +class TextOverlayItem(BaseModel): + text: str + style: Literal["date", "name", "stat", "cost"] + + +class GenerateTextOverlaysRequest(BaseModel): + language: Optional[str] = Field(None, max_length=8, description="vi, en, es... 
(auto-detect if omitted)") + + +class GenerateTextOverlaysResponse(BaseModel): + project_id: str + video_id: str + language: str + scenes_total: int + scenes_with_overlays: int + items_total: int + output_path: str + overlays: dict[str, list[TextOverlayItem]] + + +class BrandLogoRequest(BaseModel): + channel_name: str = Field(..., min_length=1, max_length=120) + project_id: Optional[str] = None + video_id: Optional[str] = None + video_path: Optional[str] = None + output_path: Optional[str] = None + size: Optional[int] = Field(None, ge=64, le=512) + apply_thumbnails: bool = False + include_intro: bool = True + include_outro: bool = True + + +class BrandLogoResponse(BaseModel): + output_path: str + width: int + height: int + logo_size: int + logo_padding: int + intro_used: Optional[str] = None + outro_used: Optional[str] = None + badge_4k_applied: bool = False + thumbnails: list[str] = Field(default_factory=list) + + +class DownloadUpscalesRequest(BaseModel): + project_id: Optional[str] = None + orientation: Optional[str] = None + overwrite: bool = False + + +class DownloadUpscalesResponse(BaseModel): + project_id: str + video_id: str + orientation: str + output_dir: str + downloaded: list[str] + skipped: list[str] + failed: list[str] + + +class SmartContinueRequest(BaseModel): + project_id: Optional[str] = None + orientation: Optional[str] = None + include_upscale: bool = True + include_tts: bool = False + include_concat: bool = False + auto_download_upscales: bool = False + fit_narrator: bool = True + narrator_buffer: float = Field(default=0.5, ge=0, le=6) + tts_template: Optional[str] = None + review_before_upscale: bool = True + review_mode: Literal["light", "deep"] = "light" + review_threshold: float = Field(default=7.5, ge=0, le=10) + max_review_regens: int = Field(default=12, ge=1, le=200) + low_score_regen_image_threshold: float = Field(default=4.0, ge=0, le=10) + + +class SmartContinueResponse(BaseModel): + project_id: str + video_id: str + orientation: str 
+ action: str + message: str + queued_requests: int = 0 + requested_types: list[str] = Field(default_factory=list) + review: Optional[dict[str, Any]] = None + downloaded: Optional[dict[str, int]] = None + concat_output: Optional[str] = None + + +class ResearchSource(BaseModel): + title: str + url: str + snippet: str + + +class ResearchRequest(BaseModel): + topic: str = Field(..., min_length=2, max_length=300) + language: Optional[str] = Field(default="vi", max_length=8) + limit: int = Field(default=3, ge=1, le=8) + + +class ResearchResponse(BaseModel): + topic: str + language: str + summary: str + key_facts: list[str] + suggested_story_angle: str + sources: list[ResearchSource] + output_path: str + + +class YouTubeReferenceRequest(BaseModel): + url: str = Field(..., min_length=8, max_length=1000) + language: Optional[str] = Field(default="vi", max_length=16) + max_chars: int = Field(default=12000, ge=1500, le=40000) + + +class YouTubeReferenceResponse(BaseModel): + url: str + video_id: str + title: str + channel: Optional[str] = None + duration_sec: Optional[int] = None + upload_date: Optional[str] = None + transcript_language: str + caption_type: Literal["subtitles", "automatic_captions"] + transcript_chars: int + transcript_truncated: bool + transcript: str + + +@router.get("/local-upscale/health") +async def local_upscale_health(): + """Check local 4K upscaler dependencies (ffmpeg + Real-ESRGAN).""" + return get_local_upscale_health() + + +def _obj(item, key: str, default=None): + if isinstance(item, dict): + return item.get(key, default) + return getattr(item, key, default) + + +def _clip_text(text: str, max_len: int = 40) -> str: + compact = " ".join((text or "").strip().split()) + if len(compact) <= max_len: + return compact + return compact[: max_len - 1].rstrip() + "…" + + +def _detect_language(text: str) -> str: + if _VI_MARKER.search(text): + return "vi" + if _ES_MARKER.search(text): + return "es" + return "en" + + +def _extract_with_patterns(patterns: 
list[re.Pattern], text: str) -> str | None: + for p in patterns: + m = p.search(text) + if m: + return m.group(0) + return None + + +def _extract_overlay_candidates(text: str) -> list[TextOverlayItem]: + if not text or not text.strip(): + return [] + + out: list[TextOverlayItem] = [] + seen: set[str] = set() + + date_text = _extract_with_patterns(_DATE_PATTERNS, text) + if date_text: + normalized = _clip_text(date_text) + out.append(TextOverlayItem(text=normalized, style="date")) + seen.add(normalized.lower()) + + cost_match = _COST_PATTERN.search(text) + if cost_match: + val = _clip_text(cost_match.group(1)) + if val.lower() not in seen: + out.append(TextOverlayItem(text=val, style="cost")) + seen.add(val.lower()) + + stat_match = _STAT_PATTERN.search(text) + if stat_match: + val = _clip_text(stat_match.group(1)) + if val.lower() not in seen: + out.append(TextOverlayItem(text=val, style="stat")) + seen.add(val.lower()) + + name_match = _NAME_PATTERN.search(text) + if name_match: + val = _clip_text(name_match.group(1)) + if val.lower() not in seen: + out.append(TextOverlayItem(text=val, style="name")) + seen.add(val.lower()) + + if not out: + first_sentence = re.split(r"[.!?]\s+", text.strip())[0] + if first_sentence: + out.append(TextOverlayItem(text=_clip_text(first_sentence), style="name")) + + return out[:2] + + +def _strip_html(text: str) -> str: + return re.sub(r"<[^>]+>", "", text or "").replace(""", "\"").replace("&", "&").strip() + + +def _extract_facts_from_text(text: str, limit: int = 5) -> list[str]: + chunks = re.split(r"(?<=[.!?])\s+", text or "") + facts: list[str] = [] + for c in chunks: + t = " ".join(c.split()).strip() + if len(t) < 20: + continue + # Prefer concrete statements with numbers/dates. 
+ if re.search(r"\d", t): + facts.append(t) + if len(facts) >= limit: + break + if not facts: + for c in chunks: + t = " ".join(c.split()).strip() + if len(t) >= 24: + facts.append(t) + if len(facts) >= limit: + break + return facts[:limit] + + +def _run_cmd(cmd: list[str], timeout: int = 300): + result = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout) + if result.returncode != 0: + stderr = (result.stderr or "")[-600:] + raise HTTPException(500, f"Command failed: {' '.join(cmd[:3])}... {stderr}") + return result + + +def _normalize_yt_lang(lang: str) -> str: + return (lang or "en").strip().lower().replace("_", "-") + + +def _lang_candidates(lang: str) -> list[str]: + normalized = _normalize_yt_lang(lang) + out = [normalized] + if "-" in normalized: + out.append(normalized.split("-", 1)[0]) + if "en" not in out: + out.append("en") + if "en-orig" not in out: + out.append("en-orig") + return out + + +def _pick_caption_track( + captions: dict[str, list[dict[str, Any]]] | None, + preferred_lang: str, +) -> tuple[str, dict[str, Any]] | None: + if not captions: + return None + keys = list(captions.keys()) + if not keys: + return None + + def find_key(candidates: list[str]) -> str | None: + for cand in candidates: + for key in keys: + if key.lower() == cand: + return key + for cand in candidates: + for key in keys: + if key.lower().startswith(cand + "-"): + return key + return None + + chosen_key = find_key(_lang_candidates(preferred_lang)) or keys[0] + entries = captions.get(chosen_key) or [] + if not entries: + return None + + rank = {"json3": 0, "srv3": 1, "vtt": 2, "ttml": 3, "srt": 4} + chosen_entry = sorted( + (e for e in entries if isinstance(e, dict) and e.get("url")), + key=lambda e: rank.get(str(e.get("ext", "")).lower(), 9), + ) + if not chosen_entry: + return None + return chosen_key, chosen_entry[0] + + +def _parse_json3_transcript(payload: dict[str, Any]) -> str: + events = payload.get("events") or [] + lines: list[str] = [] + for event 
in events: + if not isinstance(event, dict): + continue + segs = event.get("segs") or [] + if not isinstance(segs, list): + continue + raw = "".join(str(seg.get("utf8", "")) for seg in segs if isinstance(seg, dict)) + line = " ".join(raw.replace("\n", " ").split()).strip() + if not line: + continue + if lines and lines[-1] == line: + continue + lines.append(line) + return " ".join(lines).strip() + + +def _parse_text_transcript(raw: str) -> str: + lines: list[str] = [] + for ln in (raw or "").splitlines(): + line = ln.strip() + if not line: + continue + if line.upper().startswith("WEBVTT"): + continue + if "-->" in line: + continue + if re.fullmatch(r"\d+", line): + continue + line = _strip_html(line) + line = " ".join(line.split()) + if not line: + continue + if lines and lines[-1] == line: + continue + lines.append(line) + return " ".join(lines).strip() + + +def _trim_transcript(text: str, max_chars: int) -> tuple[str, bool]: + clean = " ".join((text or "").split()).strip() + if len(clean) <= max_chars: + return clean, False + head = int(max_chars * 0.78) + tail = max(200, max_chars - head - 26) + trimmed = f"{clean[:head].rstrip()} ...[truncated]... 
{clean[-tail:].lstrip()}" + return trimmed, True + + +def _format_upload_date(raw: str | None) -> str | None: + if not raw: + return None + if re.fullmatch(r"\d{8}", raw): + return f"{raw[0:4]}-{raw[4:6]}-{raw[6:8]}" + return raw + + +async def _download_to_file(url: str, output_path: Path) -> bool: + output_path.parent.mkdir(parents=True, exist_ok=True) + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=180) + async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(url) as resp: + if resp.status != 200: + return False + output_path.write_bytes(await resp.read()) + return True + except Exception: + return False + + +def _probe_resolution(path: Path) -> tuple[int, int]: + cmd = [ + "ffprobe", + "-v", + "error", + "-select_streams", + "v:0", + "-show_entries", + "stream=width,height", + "-of", + "csv=p=0:s=x", + str(path), + ] + out = _run_cmd(cmd, timeout=30).stdout.strip() + if "x" not in out: + raise HTTPException(500, f"Cannot read video resolution: {path}") + w_str, h_str = out.split("x", 1) + return int(w_str), int(h_str) + + +def _normalize_video(input_path: Path, output_path: Path, width: int, height: int): + output_path.parent.mkdir(parents=True, exist_ok=True) + cmd = [ + "ffmpeg", + "-y", + "-i", + str(input_path), + "-vf", + f"scale={width}:{height}:force_original_aspect_ratio=decrease,pad={width}:{height}:(ow-iw)/2:(oh-ih)/2", + "-c:v", + "libx264", + "-preset", + "fast", + "-crf", + "18", + "-r", + "24", + "-pix_fmt", + "yuv420p", + "-c:a", + "aac", + "-b:a", + "192k", + "-ar", + "48000", + "-ac", + "2", + "-movflags", + "+faststart", + str(output_path), + ] + _run_cmd(cmd, timeout=900) + + +def _concat_videos(parts: list[Path], output_path: Path): + with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f: + for p in parts: + safe = str(p).replace("'", "'\\''") + f.write(f"file '{safe}'\n") + concat_list = Path(f.name) + try: + cmd = [ + 
"ffmpeg", + "-y", + "-f", + "concat", + "-safe", + "0", + "-i", + str(concat_list), + "-c", + "copy", + "-movflags", + "+faststart", + str(output_path), + ] + _run_cmd(cmd, timeout=900) + finally: + concat_list.unlink(missing_ok=True) + + +def _overlay_icon(input_video: Path, icon_path: Path, output_video: Path, size: int, pad: int, top_right: bool = False): + icon_scaled = f"[1:v]scale={size}:{size},format=rgba[icon]" + pos = f"W-w-{pad}:{pad}" if top_right else f"W-w-{pad}:H-h-{pad}" + cmd = [ + "ffmpeg", + "-y", + "-i", + str(input_video), + "-i", + str(icon_path), + "-filter_complex", + f"{icon_scaled};[0:v][icon]overlay={pos}", + "-c:v", + "libx264", + "-preset", + "fast", + "-crf", + "18", + "-r", + "24", + "-pix_fmt", + "yuv420p", + "-c:a", + "copy", + "-movflags", + "+faststart", + str(output_video), + ] + _run_cmd(cmd, timeout=900) + + +def _resolve_video_path(video_path: Optional[str], project_slug: str, orientation: str, video_obj) -> Path: + if video_path: + p = Path(video_path).expanduser().resolve() + if p.exists(): + return p + raise HTTPException(400, f"video_path not found: {video_path}") + + candidates = [ + OUTPUT_DIR / project_slug / f"{project_slug}_final_{orientation.lower()}_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_{orientation.lower()}.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_vertical_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_vertical.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_horizontal_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_horizontal.mp4", + ] + for candidate in candidates: + if candidate.exists(): + return candidate + + for key in ("vertical_url", "horizontal_url"): + raw = _obj(video_obj, key) + if not raw or str(raw).startswith("http"): + continue + p = Path(raw) + if p.exists(): + return p.resolve() + + raise HTTPException(400, "Cannot infer final video path. 
Run concat first or provide video_path.") + + +def _pick_first_existing(paths: list[Path]) -> Path | None: + for p in paths: + if p.exists(): + return p + return None + + +async def _enqueue_request_if_needed( + *, + req_type: str, + project_id: str, + orientation: str, + video_id: Optional[str] = None, + scene_id: Optional[str] = None, + character_id: Optional[str] = None, + source_media_id: Optional[str] = None, +) -> bool: + if scene_id: + existing = await crud.list_requests(scene_id=scene_id) + for r in existing: + if r.get("type") == req_type and r.get("status") in ("PENDING", "PROCESSING"): + return False + if character_id: + existing = await crud.list_requests(project_id=project_id) + for r in existing: + if ( + r.get("character_id") == character_id + and r.get("type") == req_type + and r.get("status") in ("PENDING", "PROCESSING") + ): + return False + + if video_id and orientation: + await crud.update_video(video_id, orientation=orientation) + + await crud.create_request( + req_type=req_type, + project_id=project_id, + video_id=video_id, + scene_id=scene_id, + character_id=character_id, + orientation=orientation, + source_media_id=source_media_id, + ) + return True + + +@router.post("/research", response_model=ResearchResponse) +async def research_topic(body: ResearchRequest): + """Fact-check topic from web sources and persist to .omc/research (fk:research parity).""" + topic = body.topic.strip() + if not topic: + raise HTTPException(400, "topic is required") + lang = (body.language or "vi").strip().lower() + if lang.startswith("vi"): + wiki_lang = "vi" + elif lang.startswith("es"): + wiki_lang = "es" + else: + wiki_lang = "en" + + search_url = f"https://{wiki_lang}.wikipedia.org/w/api.php" + params = { + "action": "query", + "list": "search", + "srsearch": topic, + "utf8": 1, + "format": "json", + "srlimit": body.limit, + } + + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=60) + async with 
aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(search_url, params=params) as resp: + if resp.status != 200: + raise HTTPException(502, f"Research search failed (HTTP {resp.status})") + raw_search = await resp.json() + + rows = raw_search.get("query", {}).get("search", []) or [] + if not rows: + raise HTTPException(404, f"No research results for topic: {topic}") + + sources: list[ResearchSource] = [] + summaries: list[str] = [] + for row in rows[: body.limit]: + title = (row.get("title") or "").strip() + if not title: + continue + snippet = _strip_html(row.get("snippet") or "") + summary_url = f"https://{wiki_lang}.wikipedia.org/api/rest_v1/page/summary/{quote(title)}" + async with session.get(summary_url) as s_resp: + if s_resp.status != 200: + continue + sd = await s_resp.json() + extract = (sd.get("extract") or "").strip() + page = (sd.get("content_urls", {}).get("desktop", {}).get("page") or "").strip() + if not page: + page = f"https://{wiki_lang}.wikipedia.org/wiki/{quote(title)}" + sources.append(ResearchSource(title=title, url=page, snippet=snippet or _clip_text(extract, 140))) + if extract: + summaries.append(extract) + + if not sources: + raise HTTPException(404, f"No valid research summaries for topic: {topic}") + + merged_summary = "\n\n".join(summaries[:3]).strip() + key_facts = _extract_facts_from_text(merged_summary, limit=6) + if not key_facts: + key_facts = [s.snippet for s in sources[:5] if s.snippet] + angle = ( + "Kể theo nhịp: bối cảnh ban đầu → bước ngoặt chính → hệ quả dài hạn, " + "nhấn mạnh mốc thời gian và nhân vật then chốt." 
+ ) + + out_dir = BASE_DIR / ".omc" / "research" + out_dir.mkdir(parents=True, exist_ok=True) + ts = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S") + out_path = out_dir / f"{ts}_{slugify(topic)[:96]}.md" + lines = [ + f"# Research: {topic}", + "", + f"- Time (UTC): {datetime.now(timezone.utc).isoformat()}", + f"- Language: {lang}", + "", + "## Summary", + merged_summary or "(No summary)", + "", + "## Key Facts", + ] + for fact in key_facts: + lines.append(f"- {fact}") + lines.extend(["", "## Sources"]) + for s in sources: + lines.append(f"- {s.title}: {s.url}") + out_path.write_text("\n".join(lines), encoding="utf-8") + + return ResearchResponse( + topic=topic, + language=lang, + summary=merged_summary, + key_facts=key_facts, + suggested_story_angle=angle, + sources=sources, + output_path=str(out_path), + ) + except HTTPException: + raise + except Exception as e: + logger.exception("Research workflow failed") + raise HTTPException(500, f"Research failed: {e}") + + +@router.post("/youtube-reference", response_model=YouTubeReferenceResponse) +async def youtube_reference(body: YouTubeReferenceRequest): + """Extract transcript + metadata from a YouTube URL for script cloning/adaptation flow.""" + url = body.url.strip() + if not re.match(r"^https?://", url, re.IGNORECASE): + raise HTTPException(400, "url must be a valid http(s) URL") + if ("youtube.com" not in url.lower()) and ("youtu.be" not in url.lower()): + raise HTTPException(400, "Only YouTube URLs are supported") + + if shutil.which("yt-dlp") is None: + raise HTTPException(500, "yt-dlp is not installed in this environment") + + try: + cmd = ["yt-dlp", "--dump-single-json", "--skip-download", "--no-warnings", "--", url] + result = _run_cmd(cmd, timeout=180) + info = json.loads(result.stdout or "{}") + except HTTPException: + raise + except Exception as e: + raise HTTPException(502, f"Failed to fetch YouTube metadata: {e}") from e + + video_id = str(info.get("id") or "").strip() + title = 
str(info.get("title") or "").strip() + if not video_id or not title: + raise HTTPException(502, "Could not parse video metadata from YouTube URL") + + preferred_lang = (body.language or "vi").strip().lower() + subtitles = info.get("subtitles") or {} + automatic = info.get("automatic_captions") or {} + + picked = _pick_caption_track(subtitles, preferred_lang) + caption_type: Literal["subtitles", "automatic_captions"] = "subtitles" + if not picked: + picked = _pick_caption_track(automatic, preferred_lang) + caption_type = "automatic_captions" + if not picked: + raise HTTPException( + 422, + "No transcript/captions found for this video. Try another video with subtitles.", + ) + + track_lang, track = picked + caption_url = str(track.get("url") or "").strip() + track_ext = str(track.get("ext") or "json3").strip().lower() + if not caption_url: + raise HTTPException(502, "Caption track URL is missing") + + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=90) + async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(caption_url) as resp: + if resp.status != 200: + raise HTTPException(502, f"Failed downloading transcript (HTTP {resp.status})") + payload_text = await resp.text() + except HTTPException: + raise + except Exception as e: + raise HTTPException(502, f"Failed downloading transcript: {e}") from e + + transcript = "" + if track_ext == "json3": + try: + payload = json.loads(payload_text or "{}") + transcript = _parse_json3_transcript(payload) + except Exception: + transcript = _parse_text_transcript(payload_text) + else: + transcript = _parse_text_transcript(payload_text) + + if not transcript: + raise HTTPException(422, "Transcript is empty or unavailable for this video") + + transcript, was_truncated = _trim_transcript(transcript, body.max_chars) + + duration_raw = info.get("duration") + duration_sec = int(duration_raw) if isinstance(duration_raw, (int, float)) else None + 
channel = str(info.get("channel") or info.get("uploader") or "").strip() or None + upload_date = _format_upload_date(str(info.get("upload_date") or "").strip() or None) + + return YouTubeReferenceResponse( + url=url, + video_id=video_id, + title=title, + channel=channel, + duration_sec=duration_sec, + upload_date=upload_date, + transcript_language=track_lang, + caption_type=caption_type, + transcript_chars=len(transcript), + transcript_truncated=was_truncated, + transcript=transcript, + ) + + +@router.get("/channels") +async def list_channels(): + channels_dir = BASE_DIR / "youtube" / "channels" + if not channels_dir.exists(): + return [] + rows = [] + for d in sorted(channels_dir.iterdir(), key=lambda p: p.name.lower()): + if not d.is_dir(): + continue + icon = d / f"{d.name}_icon.png" + rows.append( + { + "name": d.name, + "icon_exists": icon.exists(), + "intro_exists": any((d / f).exists() for f in ("intro_4k_2x.mp4", "intro_4k.mp4", "intro_1080.mp4")), + "outro_exists": any((d / f).exists() for f in ("outro_4k.mp4", "outro_1080.mp4")), + "badge_4k_exists": (d / "4k_icon.png").exists(), + } + ) + return rows + + +@router.get("/status") +async def workflow_status( + project_id: Optional[str] = Query(None), + video_id: Optional[str] = Query(None), +): + """Aggregated status dashboard (fk:status + fk:monitor parity).""" + if not project_id: + projects = await crud.list_projects() + out = [] + for p in projects: + pid = _obj(p, "id") + videos = await _repo.list_videos(pid) + out.append( + { + "id": pid, + "name": _obj(p, "name"), + "status": _obj(p, "status"), + "tier": _obj(p, "user_paygate_tier"), + "orientation": normalize_orientation(_obj(p, "orientation")), + "material": _obj(p, "material"), + "video_count": len(videos), + "created_at": _obj(p, "created_at"), + } + ) + return {"projects": out, "count": len(out)} + + project = await _repo.get_project(project_id) + if not project: + raise HTTPException(404, "Project not found") + + characters = await 
_repo.get_project_characters(project_id) + videos = await _repo.list_videos(project_id) + if not videos: + return { + "project": { + "id": _obj(project, "id"), + "name": _obj(project, "name"), + "status": _obj(project, "status"), + "material": _obj(project, "material"), + }, + "videos": [], + "message": "No videos in project", + } + + active_video = None + if video_id: + active_video = next((v for v in videos if _obj(v, "id") == video_id), None) + if not active_video: + active_video = videos[0] + + vid = _obj(active_video, "id") + scenes = sorted(await _repo.list_scenes(vid), key=lambda s: _obj(s, "display_order", 0)) + orientation = normalize_orientation( + _obj(active_video, "orientation") or _obj(project, "orientation") or "VERTICAL" + ) + prefix = orientation.lower() + project_slug = slugify(_obj(project, "name") or "project") + + refs_total = len(characters) + refs_done = sum(1 for c in characters if _obj(c, "media_id")) + scenes_total = len(scenes) + images_done = sum(1 for s in scenes if _obj(s, f"{prefix}_image_status") == "COMPLETED") + videos_done = sum(1 for s in scenes if _obj(s, f"{prefix}_video_status") == "COMPLETED") + upscales_done = sum(1 for s in scenes if _obj(s, f"{prefix}_upscale_status") == "COMPLETED") + tts_total = sum(1 for s in scenes if (_obj(s, "narrator_text") or "").strip()) + tts_done = sum( + 1 + for s in scenes + if (_obj(s, "narrator_text") or "").strip() + and scene_tts_path(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")).exists() + ) + downloads_done = sum( + 1 + for s in scenes + if resolve_4k_file(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")) is not None + ) + + if refs_total > refs_done: + next_action = "gen_refs" + elif scenes_total > images_done: + next_action = "gen_images" + elif scenes_total > videos_done: + next_action = "gen_videos" + elif scenes_total > upscales_done: + next_action = "review_or_upscale" + elif scenes_total > downloads_done: + next_action = "download_upscales" + elif 
tts_total > tts_done: + next_action = "gen_tts" + else: + next_action = "concat" + + pending = await crud.list_requests(project_id=project_id, status="PENDING") + processing = await crud.list_requests(project_id=project_id, status="PROCESSING") + failed = await crud.list_requests(project_id=project_id, status="FAILED") + + def scene_status_row(s): + order = int(_obj(s, "display_order", 0)) + narrator_text = _obj(s, "narrator_text") or "" + tts_path = scene_tts_path(project_slug, order, _obj(s, "id")) + tts_ready = bool(narrator_text.strip()) and tts_path.exists() + local_4k = resolve_4k_file(project_slug, order, _obj(s, "id")) + return { + "id": _obj(s, "id"), + "display_order": _obj(s, "display_order"), + "prompt": _obj(s, "prompt"), + "character_names": _obj(s, "character_names"), + "narrator_text": narrator_text, + "image_status": _obj(s, f"{prefix}_image_status"), + "video_status": _obj(s, f"{prefix}_video_status"), + "upscale_status": _obj(s, f"{prefix}_upscale_status"), + "tts_status": "COMPLETED" if tts_ready else ("PENDING" if narrator_text.strip() else "SKIPPED"), + "image_url": _obj(s, f"{prefix}_image_url"), + "video_url": _obj(s, f"{prefix}_video_url"), + "tts_audio_path": str(tts_path) if tts_ready else None, + "download_ready": local_4k is not None, + "download_path": str(local_4k) if local_4k is not None else None, + } + + return { + "project": { + "id": _obj(project, "id"), + "name": _obj(project, "name"), + "status": _obj(project, "status"), + "material": _obj(project, "material"), + }, + "video": { + "id": vid, + "title": _obj(active_video, "title"), + "orientation": orientation, + }, + "counts": { + "refs_done": refs_done, + "refs_total": refs_total, + "images_done": images_done, + "images_total": scenes_total, + "videos_done": videos_done, + "videos_total": scenes_total, + "upscales_done": upscales_done, + "upscales_total": scenes_total, + "tts_done": tts_done, + "tts_total": tts_total, + "downloads_done": downloads_done, + "downloads_total": 
scenes_total, + }, + "queue": { + "pending": len(pending), + "processing": len(processing), + "failed": len(failed), + }, + "characters": [ + { + "id": _obj(c, "id"), + "name": _obj(c, "name"), + "entity_type": _obj(c, "entity_type"), + "media_id": _obj(c, "media_id"), + "reference_image_url": _obj(c, "reference_image_url"), + "ready": bool(_obj(c, "media_id")), + } + for c in characters + ], + "scenes": [scene_status_row(s) for s in scenes], + "suggested_next_action": next_action, + } + + +@router.post("/videos/{video_id}/text-overlays", response_model=GenerateTextOverlaysResponse) +async def generate_text_overlays(video_id: str, body: GenerateTextOverlaysRequest): + """Generate text_overlays.json from narrator text (fk:gen-text-overlays parity).""" + video = await _repo.get_video(video_id) + if not video: + raise HTTPException(404, "Video not found") + project_id = _obj(video, "project_id") + project = await _repo.get_project(project_id) + if not project: + raise HTTPException(404, "Project not found") + + scenes = sorted(await _repo.list_scenes(video_id), key=lambda s: _obj(s, "display_order", 0)) + if not scenes: + raise HTTPException(400, "No scenes found for this video") + + joined_text = " ".join((_obj(s, "narrator_text") or "") for s in scenes) + language = (body.language or "").strip().lower() or _detect_language(joined_text) + if not language: + language = "en" + + candidates: list[tuple[int, list[TextOverlayItem]]] = [] + for scene in scenes: + text = (_obj(scene, "narrator_text") or "").strip() + if not text: + continue + extracted = _extract_overlay_candidates(text) + if extracted: + candidates.append((_obj(scene, "display_order", 0), extracted[:2])) + + target = max(1, round(len(scenes) * 0.45)) + selected = sorted(candidates, key=lambda x: (len(x[1]), -x[0]), reverse=True)[:target] + selected_orders = {order for order, _items in selected} + + overlays: dict[str, list[TextOverlayItem]] = {} + for order, items in candidates: + if order in 
selected_orders: + overlays[str(order)] = items[:2] + + project_slug = slugify(_obj(project, "name") or "project") + out_dir = OUTPUT_DIR / project_slug + out_dir.mkdir(parents=True, exist_ok=True) + out_path = out_dir / "text_overlays.json" + + raw_payload = { + key: [item.model_dump() for item in values] + for key, values in overlays.items() + } + out_path.write_text(json.dumps(raw_payload, indent=2, ensure_ascii=False)) + + items_total = sum(len(v) for v in overlays.values()) + return GenerateTextOverlaysResponse( + project_id=project_id, + video_id=video_id, + language=language, + scenes_total=len(scenes), + scenes_with_overlays=len(overlays), + items_total=items_total, + output_path=str(out_path), + overlays=overlays, + ) + + +@router.post("/brand-logo", response_model=BrandLogoResponse) +async def apply_brand_logo(body: BrandLogoRequest): + """Apply intro/outro + logo watermark + 4K badge (fk:brand-logo parity).""" + project = None + video = None + resolved_project_id = body.project_id + + if body.video_id: + video = await _repo.get_video(body.video_id) + if not video: + raise HTTPException(404, "Video not found") + resolved_project_id = _obj(video, "project_id") + + if resolved_project_id: + project = await _repo.get_project(resolved_project_id) + if not project: + raise HTTPException(404, "Project not found") + + if not project and not body.video_path: + raise HTTPException(400, "Provide project_id/video_id or explicit video_path") + + project_slug = slugify(_obj(project, "name") or "project") if project else "project" + orientation = normalize_orientation( + _obj(video, "orientation") if video else (_obj(project, "orientation") if project else "VERTICAL") + ) + source_video = _resolve_video_path(body.video_path, project_slug, orientation, video) + if not source_video.exists(): + raise HTTPException(404, f"Source video not found: {source_video}") + + channel_dir = BASE_DIR / "youtube" / "channels" / body.channel_name + if not channel_dir.exists(): + raise 
HTTPException(404, f"Channel not found: {channel_dir}") + logo = channel_dir / f"{body.channel_name}_icon.png" + if not logo.exists(): + raise HTTPException(400, f"Missing channel logo: {logo}") + + width, height = _probe_resolution(source_video) + if body.size is not None: + logo_size = body.size + elif width >= 3840: + logo_size = 220 + elif width >= 1920: + logo_size = 130 + else: + logo_size = 110 + logo_padding = 40 if width >= 3840 else 24 if width >= 1920 else 16 + + intro_used = None + outro_used = None + if width >= 3840: + intro_candidates = [channel_dir / "intro_4k_2x.mp4", channel_dir / "intro_4k.mp4", channel_dir / "intro_1080.mp4"] + outro_candidates = [channel_dir / "outro_4k.mp4", channel_dir / "outro_1080.mp4"] + else: + intro_candidates = [channel_dir / "intro_1080.mp4", channel_dir / "intro_4k.mp4"] + outro_candidates = [channel_dir / "outro_1080.mp4", channel_dir / "outro_4k.mp4"] + intro_file = _pick_first_existing(intro_candidates) if body.include_intro else None + outro_file = _pick_first_existing(outro_candidates) if body.include_outro else None + + if intro_file: + intro_used = str(intro_file) + if outro_file: + outro_used = str(outro_file) + + out_path = Path(body.output_path).expanduser().resolve() if body.output_path else source_video.with_name(f"{source_video.stem}_branded.mp4") + out_path.parent.mkdir(parents=True, exist_ok=True) + + thumbnails: list[str] = [] + badge_applied = False + + with tempfile.TemporaryDirectory(prefix="flowkit_brand_") as tmp: + tmpdir = Path(tmp) + main_norm = tmpdir / "main_norm.mp4" + _normalize_video(source_video, main_norm, width, height) + + parts = [main_norm] + if intro_file: + intro_norm = tmpdir / "intro_norm.mp4" + _normalize_video(intro_file, intro_norm, width, height) + parts.insert(0, intro_norm) + if outro_file: + outro_norm = tmpdir / "outro_norm.mp4" + _normalize_video(outro_file, outro_norm, width, height) + parts.append(outro_norm) + + merged = tmpdir / "merged.mp4" + _concat_videos(parts, 
merged) + + branded = tmpdir / "branded.mp4" + _overlay_icon(merged, logo, branded, logo_size, logo_padding, top_right=False) + + badge_4k = channel_dir / "4k_icon.png" + final_source = branded + if width >= 3840 and badge_4k.exists(): + with_badge = tmpdir / "branded_badge.mp4" + _overlay_icon(branded, badge_4k, with_badge, 180, 40, top_right=True) + final_source = with_badge + badge_applied = True + + out_path.write_bytes(final_source.read_bytes()) + + if body.apply_thumbnails and project: + thumbs_dir = OUTPUT_DIR / project_slug / "thumbnails" + if thumbs_dir.exists(): + for thumb in sorted(thumbs_dir.glob("*.png")): + if thumb.stem.endswith("_branded"): + continue + thumb_out = thumb.with_name(f"{thumb.stem}_branded.png") + cmd = [ + "ffmpeg", + "-y", + "-i", + str(thumb), + "-i", + str(logo), + "-filter_complex", + "[1:v]scale=72:72[icon];[0:v][icon]overlay=W-w-16:H-h-16", + str(thumb_out), + ] + _run_cmd(cmd, timeout=120) + thumbnails.append(str(thumb_out)) + + return BrandLogoResponse( + output_path=str(out_path), + width=width, + height=height, + logo_size=logo_size, + logo_padding=logo_padding, + intro_used=intro_used, + outro_used=outro_used, + badge_4k_applied=badge_applied, + thumbnails=thumbnails, + ) + + +@router.post("/videos/{video_id}/download-upscales", response_model=DownloadUpscalesResponse) +async def download_upscales(video_id: str, body: DownloadUpscalesRequest): + """Download completed upscale clips to local output//4k (fk:monitor parity).""" + video = await _repo.get_video(video_id) + if not video: + raise HTTPException(404, "Video not found") + + project_id = body.project_id or _obj(video, "project_id") + if not project_id: + raise HTTPException(400, "project_id is required") + project = await _repo.get_project(project_id) + if not project: + raise HTTPException(404, "Project not found") + + orientation = normalize_orientation( + body.orientation or _obj(video, "orientation") or _obj(project, "orientation") or "VERTICAL" + ) + prefix = 
orientation.lower() + scenes = sorted(await _repo.list_scenes(video_id), key=lambda s: _obj(s, "display_order", 0)) + if not scenes: + raise HTTPException(400, "No scenes found for this video") + + project_slug = slugify(_obj(project, "name") or "project") + output_dir = OUTPUT_DIR / project_slug / "4k" + output_dir.mkdir(parents=True, exist_ok=True) + + downloaded: list[str] = [] + skipped: list[str] = [] + failed: list[str] = [] + + for scene in scenes: + order = int(_obj(scene, "display_order", 0)) + scene_id = _obj(scene, "id") + status = _obj(scene, f"{prefix}_upscale_status") + if status != "COMPLETED": + skipped.append(f"scene_{order + 1}: status={status or 'UNKNOWN'}") + continue + + url = _obj(scene, f"{prefix}_upscale_url") or _obj(scene, f"{prefix}_video_url") + if not url: + failed.append(f"scene_{order + 1}: missing url") + continue + + out_path = output_dir / f"scene_{order:03d}_{scene_id}.mp4" + if out_path.exists() and not body.overwrite: + skipped.append(f"scene_{order + 1}: exists") + continue + + if str(url).startswith("http"): + ok = await _download_to_file(str(url), out_path) + if not ok: + failed.append(f"scene_{order + 1}: download failed") + continue + downloaded.append(str(out_path)) + continue + + local_src = Path(str(url)) + if local_src.exists(): + out_path.write_bytes(local_src.read_bytes()) + downloaded.append(str(out_path)) + else: + failed.append(f"scene_{order + 1}: source missing") + + return DownloadUpscalesResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + output_dir=str(output_dir), + downloaded=downloaded, + skipped=skipped, + failed=failed, + ) + + +@router.post("/videos/{video_id}/smart-continue", response_model=SmartContinueResponse) +async def smart_continue(video_id: str, body: SmartContinueRequest): + """Advance pipeline by one smart step (fk:pipeline parity with review-before-upscale).""" + video = await _repo.get_video(video_id) + if not video: + raise HTTPException(404, "Video not 
found") + + project_id = body.project_id or _obj(video, "project_id") + if not project_id: + raise HTTPException(400, "project_id is required") + project = await _repo.get_project(project_id) + if not project: + raise HTTPException(404, "Project not found") + + orientation = normalize_orientation( + body.orientation or _obj(video, "orientation") or _obj(project, "orientation") or "VERTICAL" + ) + prefix = orientation.lower() + await crud.update_video(video_id, orientation=orientation) + + scenes = sorted(await _repo.list_scenes(video_id), key=lambda s: _obj(s, "display_order", 0)) + characters = await _repo.get_project_characters(project_id) + project_slug = slugify(_obj(project, "name") or "project") + + if not scenes: + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="noop", + message="No scenes found for this video.", + ) + + scenes_total = len(scenes) + tts_total = sum(1 for s in scenes if (_obj(s, "narrator_text") or "").strip()) + tts_done = sum( + 1 + for s in scenes + if (_obj(s, "narrator_text") or "").strip() + and scene_tts_path(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")).exists() + ) + downloads_done = sum( + 1 + for s in scenes + if resolve_4k_file(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")) is not None + ) + + async def _maybe_tts() -> bool: + nonlocal tts_done + if not body.include_tts or tts_total == 0 or tts_done >= tts_total: + return False + from agent.api.tts import NarrateVideoRequest, narrate_video + + await narrate_video( + video_id, + NarrateVideoRequest( + project_id=project_id, + template=body.tts_template, + orientation=orientation, + mix=True, + ), + ) + # Refresh tts_done after generation + tts_done = sum( + 1 + for s in scenes + if (_obj(s, "narrator_text") or "").strip() + and scene_tts_path(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")).exists() + ) + return True + + # Stage 0: refs + missing_chars = [c for c in 
characters if not _obj(c, "media_id")] + if missing_chars: + queued = 0 + for c in missing_chars: + if await _enqueue_request_if_needed( + req_type="GENERATE_CHARACTER_IMAGE", + project_id=project_id, + character_id=_obj(c, "id"), + orientation=orientation, + ): + queued += 1 + + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="queue_refs", + message=f"Queued {queued}/{len(missing_chars)} missing reference image requests.", + queued_requests=queued, + requested_types=["GENERATE_CHARACTER_IMAGE"] if queued > 0 else [], + ) + + # Stage 1: images (queue missing images but do not block video stage) + pending_images = [s for s in scenes if not _obj(s, f"{prefix}_image_media_id")] + queued_images = 0 + if pending_images: + for s in pending_images: + if await _enqueue_request_if_needed( + req_type="GENERATE_IMAGE", + project_id=project_id, + video_id=video_id, + scene_id=_obj(s, "id"), + orientation=orientation, + ): + queued_images += 1 + + # Stage 2: videos (only for scenes that already have image) + pending_videos = [ + s + for s in scenes + if _obj(s, f"{prefix}_video_status") != "COMPLETED" and _obj(s, f"{prefix}_image_media_id") + ] + waiting_images = [ + s + for s in scenes + if _obj(s, f"{prefix}_video_status") != "COMPLETED" and not _obj(s, f"{prefix}_image_media_id") + ] + if pending_videos: + tts_started = await _maybe_tts() + queued_videos = 0 + for s in pending_videos: + if await _enqueue_request_if_needed( + req_type="GENERATE_VIDEO", + project_id=project_id, + video_id=video_id, + scene_id=_obj(s, "id"), + orientation=orientation, + ): + queued_videos += 1 + + queued_upscales = 0 + if body.include_upscale: + ready_for_upscale = [ + s + for s in scenes + if _obj(s, f"{prefix}_upscale_status") != "COMPLETED" + and _obj(s, f"{prefix}_video_media_id") + ] + for s in ready_for_upscale: + if await _enqueue_request_if_needed( + req_type="UPSCALE_VIDEO_LOCAL", + project_id=project_id, + 
video_id=video_id, + scene_id=_obj(s, "id"), + orientation=orientation, + ): + queued_upscales += 1 + + msg = f"Queued {queued_videos}/{len(pending_videos)} scene video requests." + if queued_images > 0: + msg += f" Also queued {queued_images}/{len(pending_images)} image requests for remaining scenes." + if waiting_images: + msg += f" {len(waiting_images)} scene(s) still waiting for image before video." + if body.include_upscale: + msg += f" Queued {queued_upscales} upscale request(s) for scenes that already have video." + if tts_started: + msg += " Triggered TTS in parallel." + requested: list[str] = [] + if queued_videos > 0: + requested.append("GENERATE_VIDEO") + if queued_images > 0: + requested.append("GENERATE_IMAGE") + if queued_upscales > 0: + requested.append("UPSCALE_VIDEO_LOCAL") + if tts_started: + requested.append("TTS_NARRATE") + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action=( + "queue_images_videos_upscale" + if queued_images > 0 and queued_upscales > 0 + else "queue_videos_upscale" + if queued_upscales > 0 + else "queue_images_and_videos" + if queued_images > 0 + else "queue_videos" + ), + message=msg, + queued_requests=queued_videos + queued_images + queued_upscales, + requested_types=requested, + ) + + if queued_images > 0: + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="queue_images", + message=f"Queued {queued_images}/{len(pending_images)} scene image requests. 
No scenes are image-ready for video yet.", + queued_requests=queued_images, + requested_types=["GENERATE_IMAGE"], + ) + + # Stage 2.5: review before upscale + if body.include_upscale and body.review_before_upscale: + try: + review = await review_video( + video_id, + project_id, + mode=body.review_mode, + orientation=orientation, + ) + except Exception as e: + raise HTTPException(500, f"Review failed: {e}") + + failed_reviews = sorted( + [sr for sr in review.scene_reviews if sr.overall_score < body.review_threshold], + key=lambda sr: sr.overall_score, + ) + if failed_reviews: + queued = 0 + req_types: list[str] = [] + reviewed_scene_by_id = {str(_obj(s, "id")): s for s in scenes} + for sr in failed_reviews[: body.max_review_regens]: + scene_id = str(sr.scene_id) + scene = reviewed_scene_by_id.get(scene_id) + if not scene: + continue + + fix = (sr.fix_guide or "").strip() + current_video_prompt = (_obj(scene, "video_prompt") or "").strip() + if fix: + marker = f"[REVIEW FIX] {fix}" + if marker.lower() not in current_video_prompt.lower(): + next_video_prompt = f"{current_video_prompt}\n{marker}".strip() if current_video_prompt else marker + await _repo.update("scene", scene_id, video_prompt=next_video_prompt) + + req_type = ( + "REGENERATE_IMAGE" + if sr.overall_score < body.low_score_regen_image_threshold or bool(getattr(sr, "has_critical_errors", False)) + else "REGENERATE_VIDEO" + ) + if await _enqueue_request_if_needed( + req_type=req_type, + project_id=project_id, + video_id=video_id, + scene_id=scene_id, + orientation=orientation, + ): + queued += 1 + req_types.append(req_type) + + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="review_regen", + message=( + f"Review found {len(failed_reviews)} scene(s) below {body.review_threshold:.1f}. " + f"Queued {queued} regen request(s)." 
+ ), + queued_requests=queued, + requested_types=sorted(set(req_types)), + review={ + "mode": body.review_mode, + "threshold": body.review_threshold, + "overall_score": review.overall_score, + "failed_count": len(failed_reviews), + "failed_scene_ids": [str(r.scene_id) for r in failed_reviews], + }, + ) + + # Stage 3: upscale + upscales_pending = [ + s + for s in scenes + if _obj(s, f"{prefix}_upscale_status") != "COMPLETED" + and _obj(s, f"{prefix}_video_media_id") + ] + if body.include_upscale and upscales_pending: + tts_started = await _maybe_tts() + queued = 0 + for s in upscales_pending: + if await _enqueue_request_if_needed( + req_type="UPSCALE_VIDEO_LOCAL", + project_id=project_id, + video_id=video_id, + scene_id=_obj(s, "id"), + orientation=orientation, + ): + queued += 1 + msg = f"Queued {queued}/{len(upscales_pending)} upscale request(s)." + if tts_started: + msg += " Triggered TTS in parallel." + requested = ["UPSCALE_VIDEO_LOCAL"] if queued > 0 else [] + if tts_started: + requested.append("TTS_NARRATE") + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="queue_upscale", + message=msg, + queued_requests=queued, + requested_types=requested, + ) + + # Optional rolling downloads + downloaded_meta: Optional[dict[str, int]] = None + if body.auto_download_upscales and body.include_upscale: + dl = await download_upscales( + video_id, + DownloadUpscalesRequest( + project_id=project_id, + orientation=orientation, + overwrite=False, + ), + ) + downloaded_meta = { + "downloaded": len(dl.downloaded), + "skipped": len(dl.skipped), + "failed": len(dl.failed), + } + # refresh local download count after potential new downloads + downloads_done = sum( + 1 + for s in scenes + if resolve_4k_file(project_slug, int(_obj(s, "display_order", 0)), _obj(s, "id")) is not None + ) + + # Optional concat when fully ready + if body.include_concat: + upscale_ready = (not body.include_upscale) or all( + _obj(s, 
f"{prefix}_upscale_status") == "COMPLETED" for s in scenes + ) + tts_ready = (not body.include_tts) or (tts_total == 0) or (tts_done >= tts_total) + downloads_ready = (not body.include_upscale) or (not body.auto_download_upscales) or (downloads_done >= scenes_total) + if upscale_ready and tts_ready and downloads_ready: + from agent.api.videos import ConcatRequest, concat_video + + concat_res = await concat_video( + video_id, + ConcatRequest( + project_id=project_id, + orientation=orientation, + with_narrator=True, + with_music=False, + fit_narrator=body.fit_narrator, + narrator_buffer=body.narrator_buffer, + ), + ) + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="concat_done", + message="Pipeline completed and concat finished.", + concat_output=str(concat_res.output_path), + downloaded=downloaded_meta, + ) + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="wait_concat", + message=( + f"Waiting before concat (upscale_ready={upscale_ready}, " + f"tts_ready={tts_ready}, downloads_ready={downloads_ready})." + ), + downloaded=downloaded_meta, + ) + + # No further stages + tts_started = await _maybe_tts() + return SmartContinueResponse( + project_id=project_id, + video_id=video_id, + orientation=orientation, + action="completed", + message="No pending stages detected for current settings." + (" TTS triggered." 
if tts_started else ""), + requested_types=["TTS_NARRATE"] if tts_started else [], + downloaded=downloaded_meta, + ) diff --git a/agent/api/youtube.py b/agent/api/youtube.py new file mode 100644 index 0000000..e475aa0 --- /dev/null +++ b/agent/api/youtube.py @@ -0,0 +1,202 @@ +"""YouTube upload endpoints.""" +from __future__ import annotations + +import asyncio +import importlib.util +import inspect +from pathlib import Path +from types import ModuleType +from typing import Literal + +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel, Field + +from agent.config import BASE_DIR, OUTPUT_DIR +from agent.sdk.persistence.sqlite_repository import SQLiteRepository +from agent.services.event_bus import event_bus +from agent.utils.slugify import slugify + +router = APIRouter(prefix="/youtube", tags=["youtube"]) + +_repo = SQLiteRepository() + + +class YouTubeUploadRequest(BaseModel): + project_id: str + video_id: str + title: str = Field(..., min_length=1, max_length=100) + description: str = "" + tags: list[str] = Field(default_factory=list) + privacy_status: Literal["private", "unlisted", "public"] = "private" + video_path: str | None = None + channel_name: str | None = None + schedule_at: str | None = None + + +class YouTubeUploadResponse(BaseModel): + video_id: str + url: str | None = None + channel_name: str + + +def _load_youtube_module() -> ModuleType | None: + module_path = BASE_DIR / "youtube" / "upload.py" + if not module_path.exists(): + return None + spec = importlib.util.spec_from_file_location("flowkit_youtube_upload", module_path) + if not spec or not spec.loader: + return None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def _detect_channel_name(explicit: str | None) -> str: + if explicit: + return explicit + channels_dir = BASE_DIR / "youtube" / "channels" + if not channels_dir.exists(): + raise HTTPException(400, "Missing youtube/channels directory. 
Configure a channel first.") + + channels = [p.name for p in channels_dir.iterdir() if p.is_dir()] + if not channels: + raise HTTPException(400, "No channel found under youtube/channels. Configure one and retry.") + if len(channels) > 1: + raise HTTPException(400, "Multiple channels detected. Please pass channel_name explicitly.") + return channels[0] + + +def _resolve_video_path( + requested_path: str | None, + project_slug: str, + orientation: str, + sdk_video, +) -> Path: + if requested_path: + p = Path(requested_path).expanduser() + if p.exists(): + return p + raise HTTPException(400, f"video_path does not exist: {requested_path}") + + # Prefer explicit local final files. + final_candidates = [ + OUTPUT_DIR / project_slug / f"{project_slug}_final_{orientation.lower()}_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_{orientation.lower()}.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_vertical_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_vertical.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_horizontal_music.mp4", + OUTPUT_DIR / project_slug / f"{project_slug}_final_horizontal.mp4", + ] + for candidate in final_candidates: + if candidate.exists(): + return candidate + + # Fallback to video table local path. + for candidate in (sdk_video.vertical_url, sdk_video.horizontal_url): + if not candidate: + continue + p = Path(candidate) + if p.exists(): + return p + + raise HTTPException( + 400, + "Could not infer final video path. 
Please run concat first or pass video_path explicitly.", + ) + + +async def _call_upload_function(upload_fn, kwargs: dict): + sig = inspect.signature(upload_fn) + accepted_kwargs = { + key: value + for key, value in kwargs.items() + if key in sig.parameters and value is not None + } + if inspect.iscoroutinefunction(upload_fn): + return await upload_fn(**accepted_kwargs) + return await asyncio.to_thread(upload_fn, **accepted_kwargs) + + +@router.post("/upload", response_model=YouTubeUploadResponse) +async def upload_to_youtube(body: YouTubeUploadRequest): + sdk_video = await _repo.get_video(body.video_id) + if not sdk_video: + raise HTTPException(404, "Video not found") + if sdk_video.project_id != body.project_id: + raise HTTPException(400, "project_id does not match this video") + + project = await _repo.get_project(body.project_id) + if not project: + raise HTTPException(404, "Project not found") + + module = _load_youtube_module() + if not module: + raise HTTPException( + 501, + "YouTube uploader is not installed (missing youtube/upload.py).", + ) + upload_fn = getattr(module, "upload_video", None) + if not callable(upload_fn): + raise HTTPException(501, "youtube/upload.py is missing required function: upload_video") + + channel_name = _detect_channel_name(body.channel_name) + orientation = (sdk_video.orientation or project.orientation or "VERTICAL").upper() + project_slug = slugify(project.name) + video_path = _resolve_video_path(body.video_path, project_slug, orientation, sdk_video) + + try: + raw_result = await _call_upload_function( + upload_fn, + { + "channel_name": channel_name, + "video_path": str(video_path), + "title": body.title.strip(), + "description": body.description.strip(), + "tags": body.tags, + "privacy_status": body.privacy_status, + "privacy": body.privacy_status, + "schedule_at": body.schedule_at, + }, + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(502, f"YouTube upload failed: {e}") from e + + yt_video_id 
= None + url = None + + if isinstance(raw_result, str): + yt_video_id = raw_result + elif isinstance(raw_result, dict): + yt_video_id = ( + raw_result.get("video_id") + or raw_result.get("youtube_id") + or raw_result.get("id") + ) + url = raw_result.get("url") + elif isinstance(raw_result, tuple) and raw_result: + yt_video_id = raw_result[0] + if len(raw_result) > 1: + url = raw_result[1] + + if not yt_video_id: + raise HTTPException(502, "YouTube uploader returned no video_id") + if not url: + url = f"https://youtu.be/{yt_video_id}" + + await _repo.update( + "video", + body.video_id, + youtube_id=yt_video_id, + privacy=body.privacy_status, + tags=",".join(body.tags) if body.tags else None, + ) + await event_bus.emit("youtube_uploaded", { + "video_id": body.video_id, + "youtube_id": yt_video_id, + "url": url, + "channel_name": channel_name, + }) + + return YouTubeUploadResponse(video_id=yt_video_id, url=url, channel_name=channel_name) diff --git a/agent/config.py b/agent/config.py index d180bdb..61dd697 100644 --- a/agent/config.py +++ b/agent/config.py @@ -21,13 +21,47 @@ RECAPTCHA_SITE_KEY = os.environ.get("RECAPTCHA_SITE_KEY", "6LdsFiUsAAAAAIjVDZcuLhaHiDn5nnHVXVRQGeMV") # ─── Worker ────────────────────────────────────────────────── -POLL_INTERVAL = int(os.environ.get("POLL_INTERVAL", "5")) -VIDEO_POLL_INTERVAL = int(os.environ.get("VIDEO_POLL_INTERVAL", "10")) # polling interval for video/upscale status +# Stability profile (desktop default): +# - Keep moderate overall concurrency +# - Throttle image requests to reduce reCAPTCHA traffic flags +POLL_INTERVAL = int(os.environ.get("POLL_INTERVAL", "1")) +VIDEO_POLL_INTERVAL = int(os.environ.get("VIDEO_POLL_INTERVAL", "15")) # polling interval for video/upscale status MAX_RETRIES = int(os.environ.get("MAX_RETRIES", "5")) VIDEO_POLL_TIMEOUT = int(os.environ.get("VIDEO_POLL_TIMEOUT", "420")) -API_COOLDOWN = int(os.environ.get("API_COOLDOWN", "10")) # seconds between API calls (anti-spam) -MAX_CONCURRENT_REQUESTS = 
int(os.environ.get("MAX_CONCURRENT_REQUESTS", "5")) # Google Flow max parallel requests +API_COOLDOWN = float(os.environ.get("API_COOLDOWN", "1")) # seconds between API calls +MAX_CONCURRENT_REQUESTS = int(os.environ.get("MAX_CONCURRENT_REQUESTS", "4")) +# All Flow generation requests consume reCAPTCHA budget. +# Keep this conservative to avoid unusual-traffic lockouts. +MAX_CONCURRENT_CAPTCHA_REQUESTS = int(os.environ.get("MAX_CONCURRENT_CAPTCHA_REQUESTS", "1")) +CAPTCHA_API_COOLDOWN = float(os.environ.get("CAPTCHA_API_COOLDOWN", "10")) # minimum gap between captcha-consuming API calls +MAX_CONCURRENT_IMAGE_REQUESTS = int(os.environ.get("MAX_CONCURRENT_IMAGE_REQUESTS", "1")) +IMAGE_API_COOLDOWN = float(os.environ.get("IMAGE_API_COOLDOWN", "12")) # minimum gap between image/edit requests +# Video queue can run in parallel without overloading captcha as heavily as image generation. +MAX_CONCURRENT_VIDEO_REQUESTS = int(os.environ.get("MAX_CONCURRENT_VIDEO_REQUESTS", "4")) +VIDEO_API_COOLDOWN = float(os.environ.get("VIDEO_API_COOLDOWN", "1")) # min gap for video submit/status jobs +# Local 4K upscale is CPU/GPU intensive; keep strict concurrency by default. +MAX_CONCURRENT_LOCAL_UPSCALE_REQUESTS = int(os.environ.get("MAX_CONCURRENT_LOCAL_UPSCALE_REQUESTS", "1")) +# Ref stage (character + location) can run slightly faster than scene image stage. 
+MAX_CONCURRENT_CHARACTER_REF_REQUESTS = int(os.environ.get("MAX_CONCURRENT_CHARACTER_REF_REQUESTS", "2")) +CHARACTER_IMAGE_API_COOLDOWN = float(os.environ.get("CHARACTER_IMAGE_API_COOLDOWN", "5")) # min gap for character/location ref ops +CAPTCHA_RETRY_LIMIT = int(os.environ.get("CAPTCHA_RETRY_LIMIT", "10")) +CAPTCHA_RETRY_BACKOFF_BASE = int(os.environ.get("CAPTCHA_RETRY_BACKOFF_BASE", "45")) # seconds +CAPTCHA_RETRY_BACKOFF_MAX = int(os.environ.get("CAPTCHA_RETRY_BACKOFF_MAX", "1800")) # seconds +CAPTCHA_GROUP_PAUSE_SEC = int(os.environ.get("CAPTCHA_GROUP_PAUSE_SEC", "180")) # pause all image jobs +CAPTCHA_TRAFFIC_PAUSE_SEC = int(os.environ.get("CAPTCHA_TRAFFIC_PAUSE_SEC", "900")) # strict pause for TOO_MUCH_TRAFFIC +CAPTCHA_SAFE_MODE_SEC = int(os.environ.get("CAPTCHA_SAFE_MODE_SEC", "1800")) # temporary image safe-mode window +CAPTCHA_SAFE_MODE_IMAGE_CONCURRENCY = int(os.environ.get("CAPTCHA_SAFE_MODE_IMAGE_CONCURRENCY", "1")) +CAPTCHA_SAFE_MODE_IMAGE_COOLDOWN = float(os.environ.get("CAPTCHA_SAFE_MODE_IMAGE_COOLDOWN", "20")) +CAPTCHA_CONTENT_TIMEOUT_PAUSE_SEC = int(os.environ.get("CAPTCHA_CONTENT_TIMEOUT_PAUSE_SEC", "90")) +OPERATION_FAILED_RETRY_BASE_SEC = int(os.environ.get("OPERATION_FAILED_RETRY_BASE_SEC", "45")) +REQUEST_DISPATCH_TIMEOUT = int(os.environ.get("REQUEST_DISPATCH_TIMEOUT", "120")) # per-request dispatch timeout STALE_PROCESSING_TIMEOUT = int(os.environ.get("STALE_PROCESSING_TIMEOUT", "600")) # 10 min +STALE_PENDING_LOCAL_UPSCALE_TIMEOUT = int( + os.environ.get("STALE_PENDING_LOCAL_UPSCALE_TIMEOUT", "5400") +) # 90 min +FLOW_CREDITS_CACHE_TTL_SEC = int(os.environ.get("FLOW_CREDITS_CACHE_TTL_SEC", "1800")) +FLOW_CREDITS_ERROR_TTL_SEC = int(os.environ.get("FLOW_CREDITS_ERROR_TTL_SEC", "30")) +TIER_SYNC_MIN_INTERVAL_SEC = int(os.environ.get("TIER_SYNC_MIN_INTERVAL_SEC", "1800")) # ─── Model Keys (loaded from models.json for easy updates) ── _MODELS_FILE = Path(__file__).parent / "models.json" @@ -57,11 +91,21 @@ SHARED_OUTPUT_DIR = OUTPUT_DIR / 
"_shared" TTS_TEMPLATES_DIR = SHARED_OUTPUT_DIR / "tts_templates" MUSIC_OUTPUT_DIR = SHARED_OUTPUT_DIR / "music" +TTS_SETTINGS_PATH = BASE_DIR / "tts_settings.json" # ─── TTS (OmniVoice) ───────────────────────────────────────── TTS_MODEL = os.environ.get("TTS_MODEL", "k2-fsa/OmniVoice") TTS_DEVICE = os.environ.get("TTS_DEVICE", "cpu") # MPS produces gibberish; CPU+fp32 works TTS_SAMPLE_RATE = int(os.environ.get("TTS_SAMPLE_RATE", "24000")) +TTS_PROVIDER = os.environ.get("TTS_PROVIDER", "elevenlabs").strip().lower() # elevenlabs | omnivoice + +# ─── TTS (ElevenLabs) ──────────────────────────────────────── +ELEVENLABS_API_BASE = os.environ.get("ELEVENLABS_API_BASE", "https://api.elevenlabs.io").rstrip("/") +ELEVENLABS_API_KEY = os.environ.get("ELEVENLABS_API_KEY", "").strip() +ELEVENLABS_MODEL_ID = os.environ.get("ELEVENLABS_MODEL_ID", "eleven_multilingual_v2").strip() +ELEVENLABS_DEFAULT_VOICE_ID = os.environ.get("ELEVENLABS_DEFAULT_VOICE_ID", "").strip() +ELEVENLABS_TIMEOUT_SEC = float(os.environ.get("ELEVENLABS_TIMEOUT_SEC", "60")) +ELEVENLABS_MAX_RETRIES = int(os.environ.get("ELEVENLABS_MAX_RETRIES", "2")) # ─── Review / Claude Vision ────────────────────────────────── ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "") diff --git a/agent/db/crud.py b/agent/db/crud.py index 51f7a1d..707caf3 100644 --- a/agent/db/crud.py +++ b/agent/db/crud.py @@ -18,7 +18,7 @@ def _validate_table(table: str) -> None: # Column whitelists per table — prevents SQL injection via kwargs keys _COLUMNS = { "character": {"name", "slug", "entity_type", "description", "image_prompt", "voice_description", "reference_image_url", "media_id", "updated_at"}, - "project": {"name", "description", "story", "thumbnail_url", "language", "status", "user_paygate_tier", "narrator_voice", "narrator_ref_audio", "material", "allow_music", "allow_voice", "updated_at"}, + "project": {"name", "description", "story", "thumbnail_url", "language", "status", "user_paygate_tier", "narrator_voice", 
"narrator_ref_audio", "material", "orientation", "allow_music", "allow_voice", "updated_at"}, "video": {"title", "description", "display_order", "status", "orientation", "vertical_url", "horizontal_url", "thumbnail_url", "duration", "resolution", "youtube_id", "privacy", "tags", "updated_at"}, "scene": {"prompt", "image_prompt", "video_prompt", "character_names", "parent_scene_id", "chain_type", @@ -111,13 +111,13 @@ async def list_characters() -> list[dict]: # ─── Project ──────────────────────────────────────────────── -async def create_project(name: str, description: str = None, story: str = None, language: str = "en", user_paygate_tier: str = "PAYGATE_TIER_ONE", id: str = None, material: str = None, allow_music: bool = False, allow_voice: bool = False) -> dict: +async def create_project(name: str, description: str = None, story: str = None, language: str = "en", user_paygate_tier: str = "PAYGATE_TIER_ONE", id: str = None, material: str = None, orientation: str = "VERTICAL", allow_music: bool = False, allow_voice: bool = False) -> dict: db = await get_db() pid, now = id or _uuid(), _now() async with _db_lock: await db.execute( - "INSERT INTO project (id,name,description,story,language,user_paygate_tier,material,allow_music,allow_voice,created_at,updated_at) VALUES (?,?,?,?,?,?,?,?,?,?,?)", - (pid, name, description, story, language, user_paygate_tier, material, int(allow_music), int(allow_voice), now, now)) + "INSERT INTO project (id,name,description,story,language,user_paygate_tier,material,orientation,allow_music,allow_voice,created_at,updated_at) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", + (pid, name, description, story, language, user_paygate_tier, material, orientation, int(allow_music), int(allow_voice), now, now)) await db.commit() return await _get_with_db(db, "project", "id", pid) @@ -188,19 +188,21 @@ async def create_scene(video_id: str, display_order: int, prompt: str, transition_prompt: str = None, character_names: list[str] = None, parent_scene_id: str = 
None, chain_type: str = "ROOT", - source: str = "root") -> dict: + source: str = "root", + narrator_text: str = None) -> dict: db = await get_db() sid, now = _uuid(), _now() chars_json = json.dumps(character_names) if character_names else None async with _db_lock: await db.execute( """INSERT INTO scene (id,video_id,display_order,prompt,image_prompt,video_prompt,transition_prompt,character_names, - parent_scene_id,chain_type,source,created_at,updated_at) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)""", + parent_scene_id,chain_type,source,narrator_text,created_at,updated_at) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)""", (sid, video_id, display_order, prompt, image_prompt, video_prompt, transition_prompt, chars_json, - parent_scene_id, chain_type, source, now, now)) + parent_scene_id, chain_type, source, narrator_text, now, now)) await db.commit() return await _get_with_db(db, "scene", "id", sid) + async def get_scene(sid: str): return await _get("scene", "id", sid) async def update_scene(sid: str, **kw): return await _update("scene", "id", sid, **kw) async def delete_scene(sid: str): return await _delete("scene", "id", sid) @@ -230,6 +232,53 @@ async def list_characters_by_media_id(media_id: str) -> list[dict]: return [dict(r) for r in await cur.fetchall()] +async def clear_redirect_media_urls() -> dict: + """Clear unstable TRPC redirect URLs so UI will fetch fresh signed URLs. + + Redirect URLs (media.getMediaUrlRedirect) can break after restart/session change. + We keep media_id and reset URL slots to NULL, forcing refresh on demand. + """ + db = await get_db() + now = _now() + scene_fields = ( + "vertical_image_url", + "horizontal_image_url", + "vertical_video_url", + "horizontal_video_url", + "vertical_upscale_url", + "horizontal_upscale_url", + ) + scene_slots_cleared = 0 + characters_cleared = 0 + async with _db_lock: + for field in scene_fields: + cur = await db.execute( + f""" + UPDATE scene + SET {field}=NULL, updated_at=? 
+ WHERE {field} LIKE '%media.getMediaUrlRedirect%' + """, + (now,), + ) + scene_slots_cleared += int(cur.rowcount or 0) + + cur = await db.execute( + """ + UPDATE character + SET reference_image_url=NULL, updated_at=? + WHERE reference_image_url LIKE '%media.getMediaUrlRedirect%' + """, + (now,), + ) + characters_cleared = int(cur.rowcount or 0) + await db.commit() + + return { + "scene_slots_cleared": scene_slots_cleared, + "characters_cleared": characters_cleared, + } + + # ─── Request ──────────────────────────────────────────────── async def create_request(req_type: str, orientation: str = None, @@ -271,6 +320,53 @@ async def list_pending_requests() -> list[dict]: return [dict(r) for r in await cur.fetchall()] +async def migrate_upscale_requests_to_local() -> int: + """Back-compat: convert legacy UPSCALE_VIDEO rows to UPSCALE_VIDEO_LOCAL.""" + db = await get_db() + now = _now() + async with _db_lock: + cur = await db.execute( + """ + UPDATE request + SET type='UPSCALE_VIDEO_LOCAL', + updated_at=?, + next_retry_at=CASE WHEN status='PENDING' THEN NULL ELSE next_retry_at END, + retry_count=CASE WHEN status='PENDING' THEN 0 ELSE retry_count END, + error_message=CASE WHEN status='PENDING' THEN 'migrated to local upscale' ELSE error_message END + WHERE type='UPSCALE_VIDEO' + """, + (now,), + ) + await db.commit() + return int(cur.rowcount or 0) + + +async def fail_stale_pending_local_upscale(timeout_seconds: int = 5400) -> int: + """Mark very old pending local-upscale requests as FAILED to avoid restart storms.""" + from datetime import datetime, timedelta, timezone + + cutoff = ( + datetime.now(timezone.utc) - timedelta(seconds=max(60, int(timeout_seconds))) + ).strftime("%Y-%m-%dT%H:%M:%SZ") + db = await get_db() + async with _db_lock: + cur = await db.execute( + """ + UPDATE request + SET status='FAILED', + next_retry_at=NULL, + error_message=COALESCE(error_message, 'stale pending local upscale auto-stopped on startup'), + updated_at=? 
+ WHERE status='PENDING' + AND type='UPSCALE_VIDEO_LOCAL' + AND updated_at < ? + """, + (_now(), cutoff), + ) + await db.commit() + return int(cur.rowcount or 0) + + async def list_actionable_requests(exclude_ids: set[str] = None, limit: int = 5) -> list[dict]: """Priority-ordered fetch of PENDING requests ready to process.""" db = await get_db() @@ -283,6 +379,11 @@ async def list_actionable_requests(exclude_ids: set[str] = None, limit: int = 5) WHERE status = 'PENDING' AND (next_retry_at IS NULL OR next_retry_at <= ?) ORDER BY + CASE + WHEN type IN ('GENERATE_VIDEO','REGENERATE_VIDEO','GENERATE_VIDEO_REFS','UPSCALE_VIDEO') + AND request_id IS NOT NULL THEN 1 + ELSE 0 + END, CASE type WHEN 'GENERATE_CHARACTER_IMAGE' THEN 0 WHEN 'REGENERATE_CHARACTER_IMAGE' THEN 0 @@ -293,6 +394,7 @@ async def list_actionable_requests(exclude_ids: set[str] = None, limit: int = 5) WHEN 'GENERATE_VIDEO' THEN 2 WHEN 'GENERATE_VIDEO_REFS' THEN 2 WHEN 'UPSCALE_VIDEO' THEN 3 + WHEN 'UPSCALE_VIDEO_LOCAL' THEN 3 ELSE 2 END, created_at ASC diff --git a/agent/db/schema.py b/agent/db/schema.py index 365ec5d..dc7bba4 100644 --- a/agent/db/schema.py +++ b/agent/db/schema.py @@ -138,7 +138,7 @@ video_id TEXT REFERENCES video(id) ON DELETE CASCADE, scene_id TEXT REFERENCES scene(id) ON DELETE CASCADE, character_id TEXT REFERENCES character(id) ON DELETE CASCADE, - type TEXT NOT NULL CHECK(type IN ('GENERATE_IMAGE','REGENERATE_IMAGE','EDIT_IMAGE','GENERATE_VIDEO','REGENERATE_VIDEO','GENERATE_VIDEO_REFS','UPSCALE_VIDEO','GENERATE_CHARACTER_IMAGE','REGENERATE_CHARACTER_IMAGE','EDIT_CHARACTER_IMAGE')), + type TEXT NOT NULL CHECK(type IN ('GENERATE_IMAGE','REGENERATE_IMAGE','EDIT_IMAGE','GENERATE_VIDEO','REGENERATE_VIDEO','GENERATE_VIDEO_REFS','UPSCALE_VIDEO','UPSCALE_VIDEO_LOCAL','GENERATE_CHARACTER_IMAGE','REGENERATE_CHARACTER_IMAGE','EDIT_CHARACTER_IMAGE')), orientation TEXT CHECK(orientation IN ('VERTICAL','HORIZONTAL')), status TEXT NOT NULL DEFAULT 'PENDING' CHECK(status IN 
('PENDING','PROCESSING','COMPLETED','FAILED')), request_id TEXT, -- external operation ID @@ -217,6 +217,8 @@ async def init_db(): needs_recreate = True # old GENERATE_IMAGES typo if 'REGENERATE_IMAGE' not in table_sql: needs_recreate = True # missing REGENERATE/EDIT types + if 'UPSCALE_VIDEO_LOCAL' not in table_sql: + needs_recreate = True # add local 4k upscaler request type if needs_recreate: await db.execute("PRAGMA foreign_keys=OFF") await db.execute("ALTER TABLE request RENAME TO _request_old") @@ -227,7 +229,7 @@ async def init_db(): video_id TEXT REFERENCES video(id) ON DELETE CASCADE, scene_id TEXT REFERENCES scene(id) ON DELETE CASCADE, character_id TEXT REFERENCES character(id) ON DELETE CASCADE, - type TEXT NOT NULL CHECK(type IN ('GENERATE_IMAGE','REGENERATE_IMAGE','EDIT_IMAGE','GENERATE_VIDEO','REGENERATE_VIDEO','GENERATE_VIDEO_REFS','UPSCALE_VIDEO','GENERATE_CHARACTER_IMAGE','REGENERATE_CHARACTER_IMAGE','EDIT_CHARACTER_IMAGE')), + type TEXT NOT NULL CHECK(type IN ('GENERATE_IMAGE','REGENERATE_IMAGE','EDIT_IMAGE','GENERATE_VIDEO','REGENERATE_VIDEO','GENERATE_VIDEO_REFS','UPSCALE_VIDEO','UPSCALE_VIDEO_LOCAL','GENERATE_CHARACTER_IMAGE','REGENERATE_CHARACTER_IMAGE','EDIT_CHARACTER_IMAGE')), orientation TEXT CHECK(orientation IN ('VERTICAL','HORIZONTAL')), status TEXT NOT NULL DEFAULT 'PENDING' CHECK(status IN ('PENDING','PROCESSING','COMPLETED','FAILED')), request_id TEXT, @@ -276,6 +278,9 @@ async def init_db(): if "allow_voice" not in project_columns: await db.execute("ALTER TABLE project ADD COLUMN allow_voice INTEGER NOT NULL DEFAULT 0") logger.info("Migrated: added allow_voice column to project table") + if "orientation" not in project_columns: + await db.execute("ALTER TABLE project ADD COLUMN orientation TEXT NOT NULL DEFAULT 'VERTICAL'") + logger.info("Migrated: added orientation column to project table") # Migration: add orientation to video table + backfill from scene data cursor = await db.execute("PRAGMA table_info(video)") video_columns = 
{row[1] for row in await cursor.fetchall()} diff --git a/agent/main.py b/agent/main.py index 4c4c75b..d06109d 100644 --- a/agent/main.py +++ b/agent/main.py @@ -23,6 +23,8 @@ from agent.api.music import router as music_router from agent.api.models import router as models_router from agent.api.active_project import router as active_project_router +from agent.api.youtube import router as youtube_router +from agent.api.workflows import router as workflows_router from agent.worker.processor import get_worker_controller from agent.services.flow_client import get_flow_client from agent.services.event_bus import event_bus @@ -33,6 +35,7 @@ # ─── WebSocket Server for Extension ───────────────────────── +_ws_listener_active = False async def ws_handler(websocket): """Handle a Chrome extension WebSocket connection.""" @@ -55,15 +58,31 @@ async def ws_handler(websocket): except websockets.ConnectionClosed: pass finally: - client.clear_extension() + client.clear_extension(websocket) logger.info("Extension disconnected") async def run_ws_server(): - """Run WebSocket server for extension connections.""" - async with websockets.serve(ws_handler, WS_HOST, WS_PORT): - logger.info("WebSocket server listening on ws://%s:%d", WS_HOST, WS_PORT) - await asyncio.Future() # run forever + """Run WebSocket server for extension connections. + + Keep retrying so temporary bind/runtime errors don't leave the extension + permanently disconnected while API server is still alive. + """ + global _ws_listener_active + retry_delay_sec = 2 + while True: + try: + async with websockets.serve(ws_handler, WS_HOST, WS_PORT): + _ws_listener_active = True + logger.info("WebSocket server listening on ws://%s:%d", WS_HOST, WS_PORT) + await asyncio.Future() # run forever until cancelled + except asyncio.CancelledError: + _ws_listener_active = False + raise + except Exception as e: + _ws_listener_active = False + logger.exception("WS server crashed (%s). 
Retrying in %ss", e, retry_delay_sec) + await asyncio.sleep(retry_delay_sec) # ─── FastAPI App ───────────────────────────────────────────── @@ -72,6 +91,21 @@ async def run_ws_server(): async def lifespan(app: FastAPI): await init_db() + # One-time hygiene: drop unstable redirect URLs persisted from older builds. + from agent.db import crud as db_crud + try: + cleaned = await db_crud.clear_redirect_media_urls() + slots = int(cleaned.get("scene_slots_cleared", 0)) + chars = int(cleaned.get("characters_cleared", 0)) + if slots or chars: + logger.info( + "Startup URL cleanup: cleared %d scene URL slots and %d character refs (redirect URLs)", + slots, + chars, + ) + except Exception as e: + logger.warning("Startup URL cleanup failed: %s", e) + # Load custom materials from DB into in-memory registry from agent.db.crud import list_materials as db_list_materials from agent.materials import register_material, _BUILTIN_IDS @@ -90,9 +124,13 @@ async def lifespan(app: FastAPI): controller = get_worker_controller() - # SIGTERM handler for graceful shutdown + # SIGTERM handler for graceful shutdown. + # Windows event loop does not implement add_signal_handler(). 
loop = asyncio.get_event_loop() - loop.add_signal_handler(signal.SIGTERM, controller.request_shutdown) + try: + loop.add_signal_handler(signal.SIGTERM, controller.request_shutdown) + except (NotImplementedError, RuntimeError, ValueError): + logger.info("SIGTERM handler not supported on this platform/event loop; skip signal hook.") # Start background tasks ws_task = asyncio.create_task(run_ws_server()) @@ -128,6 +166,8 @@ async def lifespan(app: FastAPI): app.include_router(tts_router, prefix="/api") app.include_router(materials_router, prefix="/api") app.include_router(music_router, prefix="/api") +app.include_router(youtube_router, prefix="/api") +app.include_router(workflows_router, prefix="/api") app.include_router(models_router) app.include_router(active_project_router) @@ -164,10 +204,16 @@ async def ext_callback(request: Request): @app.get("/health") async def health(): client = get_flow_client() + ext_status = await client.get_extension_status() + runtime_connected = bool(ext_status.get("runtime_connected")) return { "status": "ok", "version": "0.2.0", - "extension_connected": client.connected, + "extension_connected": runtime_connected, + "extension_ws_connected": client.connected, + "extension_state": ext_status.get("state"), + "extension_manual_disconnect": ext_status.get("manual_disconnect"), + "ws_server_listening": _ws_listener_active, "ws": client.ws_stats, } @@ -180,7 +226,7 @@ async def dashboard_ws(websocket: WebSocket): # Reject cross-origin connections (only allow localhost) origin = (websocket.headers.get("origin") or "").lower() if origin and not any(origin.startswith(p) for p in ( - "http://127.0.0.1", "http://localhost", "chrome-extension://", + "http://127.0.0.1", "http://localhost", "chrome-extension://", "file://", )): await websocket.close(code=4003, reason="Origin not allowed") return @@ -192,13 +238,14 @@ async def dashboard_ws(websocket: WebSocket): client = get_flow_client() controller = get_worker_controller() from agent.db import 
crud + ext_status = await client.get_extension_status() pending_requests = await crud.list_requests(status="PENDING") processing_requests = await crud.list_requests(status="PROCESSING") snapshot = { "type": "snapshot", "health": { "status": "ok", - "extension_connected": client.connected, + "extension_connected": bool(ext_status.get("runtime_connected")), }, "requests": pending_requests + processing_requests, "worker": { diff --git a/agent/models.json b/agent/models.json index 6bd4242..f3af5fb 100644 --- a/agent/models.json +++ b/agent/models.json @@ -3,15 +3,15 @@ "PAYGATE_TIER_TWO": { "frame_2_video": { "VIDEO_ASPECT_RATIO_LANDSCAPE": "veo_3_1_i2v_s_fast_ultra_relaxed", - "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_i2v_s_fast_ultra_relaxed" + "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_i2v_s_fast_portrait_ultra_relaxed" }, "start_end_frame_2_video": { "VIDEO_ASPECT_RATIO_LANDSCAPE": "veo_3_1_i2v_s_fast_ultra_relaxed", - "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_i2v_s_fast_ultra_relaxed" + "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_i2v_s_fast_portrait_fl_ultra_relaxed" }, "reference_frame_2_video": { "VIDEO_ASPECT_RATIO_LANDSCAPE": "veo_3_1_r2v_fast_landscape_ultra_relaxed", - "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_r2v_fast_landscape_ultra_relaxed" + "VIDEO_ASPECT_RATIO_PORTRAIT": "veo_3_1_r2v_fast_portrait_ultra_relaxed" } }, "PAYGATE_TIER_ONE": { diff --git a/agent/models/enums.py b/agent/models/enums.py index 8876ab6..f536625 100644 --- a/agent/models/enums.py +++ b/agent/models/enums.py @@ -2,7 +2,7 @@ RequestType = Literal[ "GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE", - "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", + "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL", "GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE", ] diff --git a/agent/models/project.py b/agent/models/project.py index 1e266b9..958f7ea 100644 --- a/agent/models/project.py +++ 
b/agent/models/project.py @@ -18,8 +18,9 @@ class ProjectCreate(BaseModel): language: str = "en" user_paygate_tier: PaygateTier = "PAYGATE_TIER_ONE" tool_name: str = "PINHOLE" - material: str = Field("realistic", pattern=r"^[a-z0-9][a-z0-9_]{1,63}$") # material ID from GET /api/materials - style: Optional[str] = None # deprecated: use material instead; "3D"→"3d_pixar", "photorealistic"→"realistic" + material: str = Field("realistic", pattern=r"^[a-z0-9][a-z0-9_]{1,63}$") + orientation: str = "VERTICAL" # VERTICAL=9:16, HORIZONTAL=16:9 + style: Optional[str] = None allow_music: bool = False # when True, skip "no background music" suffix in video prompts allow_voice: bool = False # when True, keep character dialogue in video audio (suppress only music/narration) characters: Optional[list[CharacterInput]] = None @@ -47,6 +48,7 @@ class ProjectUpdate(BaseModel): narrator_voice: Optional[str] = None narrator_ref_audio: Optional[str] = None material: Optional[str] = None + orientation: Optional[str] = None allow_music: Optional[bool] = None allow_voice: Optional[bool] = None @@ -61,6 +63,7 @@ class Project(BaseModel): status: str = "ACTIVE" user_paygate_tier: str = "PAYGATE_TIER_ONE" material: Optional[str] = None + orientation: str = "VERTICAL" allow_music: bool = False allow_voice: bool = False narrator_voice: Optional[str] = None diff --git a/agent/models/request.py b/agent/models/request.py index 80b7a4b..27bb96d 100644 --- a/agent/models/request.py +++ b/agent/models/request.py @@ -21,7 +21,7 @@ def check_required_fields(self) -> "RequestCreate": if not self.project_id: raise ValueError(f"project_id is required for {req_type}") elif req_type in ("GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE", - "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): if not self.scene_id: raise ValueError(f"scene_id is required for {req_type}") if not 
self.project_id: @@ -45,6 +45,7 @@ class Request(BaseModel): output_url: Optional[str] = None error_message: Optional[str] = None retry_count: int = 0 + next_retry_at: Optional[str] = None source_media_id: Optional[str] = None created_at: Optional[str] = None updated_at: Optional[str] = None diff --git a/agent/models/scene.py b/agent/models/scene.py index 82d9c23..e57981d 100644 --- a/agent/models/scene.py +++ b/agent/models/scene.py @@ -14,6 +14,7 @@ class SceneCreate(BaseModel): parent_scene_id: Optional[str] = None chain_type: ChainType = "ROOT" source: Optional[SceneSource] = None + narrator_text: Optional[str] = None # <-- allows narrator text at creation time class SceneUpdate(BaseModel): @@ -98,6 +99,8 @@ class Scene(BaseModel): trim_end: Optional[float] = None duration: Optional[float] = None narrator_text: Optional[str] = None + tts_status: str = "PENDING" + tts_audio_path: Optional[str] = None created_at: Optional[str] = None updated_at: Optional[str] = None diff --git a/agent/models/tts.py b/agent/models/tts.py index 6e29758..4a4e8e6 100644 --- a/agent/models/tts.py +++ b/agent/models/tts.py @@ -8,6 +8,8 @@ class TTSGenerateRequest(BaseModel): instruct: Optional[str] = Field(None, max_length=200) ref_audio: Optional[str] = Field(None, max_length=500) ref_text: Optional[str] = None + voice_id: Optional[str] = Field(None, min_length=1, max_length=128) + model_id: Optional[str] = Field(None, min_length=1, max_length=128) speed: float = Field(1.0, ge=0.5, le=3.0) @@ -25,6 +27,8 @@ class NarrateVideoRequest(BaseModel): ref_audio: Optional[str] = Field(None, max_length=500) # Path to voice template WAV ref_text: Optional[str] = None # Transcript of ref_audio (auto-resolved from template) template: Optional[str] = Field(None, pattern=r"^[a-zA-Z0-9_-]{1,64}$") # Voice template name + voice_id: Optional[str] = Field(None, min_length=1, max_length=128) + model_id: Optional[str] = Field(None, min_length=1, max_length=128) mix: bool = True sfx_volume: float = 
Field(0.4, ge=0.0, le=2.0) from_scene: Optional[int] = Field(None, ge=0) # Start display_order (inclusive) @@ -55,14 +59,29 @@ class VoiceTemplateRequest(BaseModel): text: str = Field(..., min_length=1, max_length=5000) # Sample text to generate voice template instruct: str = Field(..., max_length=200) # Voice design: "male, low pitch, young adult" name: str = Field("voice_template_1", pattern=r"^[a-zA-Z0-9_-]{1,64}$") # Template name for saving + voice_id: Optional[str] = Field(None, min_length=1, max_length=128) + model_id: Optional[str] = Field(None, min_length=1, max_length=128) speed: float = Field(1.0, ge=0.5, le=3.0) +class VoiceTemplateImportRequest(BaseModel): + """Import an existing local audio file as a reusable voice template.""" + name: str = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$") + audio_path: str = Field(..., min_length=1, max_length=500) + text: str = Field(..., min_length=1, max_length=5000) + instruct: str = Field("", max_length=200) + voice_id: Optional[str] = Field(None, min_length=1, max_length=128) + model_id: Optional[str] = Field(None, min_length=1, max_length=128) + copy_audio: bool = True + + class VoiceTemplateResponse(BaseModel): name: str audio_path: str text: str instruct: str + voice_id: Optional[str] = None + model_id: Optional[str] = None duration: Optional[float] = None sample_rate: int = 24000 @@ -70,4 +89,51 @@ class VoiceTemplateResponse(BaseModel): class VoiceTemplateListItem(BaseModel): name: str audio_path: str + voice_id: Optional[str] = None + model_id: Optional[str] = None duration: Optional[float] = None + + +class TTSSettingsResponse(BaseModel): + provider: Literal["elevenlabs", "omnivoice"] + elevenlabs_api_base: str + elevenlabs_model_id: str + elevenlabs_default_voice_id: str + elevenlabs_timeout_sec: float + elevenlabs_max_retries: int + elevenlabs_api_key_set: bool + elevenlabs_api_key_masked: str = "" + + +class TTSSettingsUpdateRequest(BaseModel): + provider: Optional[Literal["elevenlabs", "omnivoice"]] = 
None + elevenlabs_api_base: Optional[str] = Field(None, max_length=300) + elevenlabs_api_key: Optional[str] = Field(None, max_length=300) + clear_elevenlabs_api_key: bool = False + elevenlabs_model_id: Optional[str] = Field(None, max_length=128) + elevenlabs_default_voice_id: Optional[str] = Field(None, max_length=128) + elevenlabs_timeout_sec: Optional[float] = Field(None, ge=5, le=300) + elevenlabs_max_retries: Optional[int] = Field(None, ge=0, le=10) + + +class TTSModelOption(BaseModel): + model_id: str + name: str + description: str = "" + language_count: int = 0 + + +class TTSVoiceOption(BaseModel): + voice_id: str + name: str + category: str = "" + preview_url: Optional[str] = None + labels: dict[str, str] = Field(default_factory=dict) + + +class TTSCatalogResponse(BaseModel): + provider: Literal["elevenlabs", "omnivoice"] + source: Literal["api", "fallback", "mixed"] = "fallback" + models: list[TTSModelOption] = Field(default_factory=list) + voices: list[TTSVoiceOption] = Field(default_factory=list) + warnings: list[str] = Field(default_factory=list) diff --git a/agent/sdk/models/project.py b/agent/sdk/models/project.py index 003f2ea..a558d79 100644 --- a/agent/sdk/models/project.py +++ b/agent/sdk/models/project.py @@ -27,6 +27,7 @@ class Project(DomainModel): status: str = "ACTIVE" user_paygate_tier: str = "PAYGATE_TIER_ONE" material: Optional[str] = None + orientation: str = "VERTICAL" allow_music: bool = False allow_voice: bool = False narrator_voice: Optional[str] = None diff --git a/agent/sdk/models/scene.py b/agent/sdk/models/scene.py index ad631ed..4574d4d 100644 --- a/agent/sdk/models/scene.py +++ b/agent/sdk/models/scene.py @@ -199,7 +199,7 @@ async def upscale_video( project_id: str, video_id: Optional[str] = None, ) -> str: - """Submit an UPSCALE_VIDEO request. Returns the request id.""" + """Submit a local upscale request. 
Returns the request id.""" from agent.sdk.services.operations import get_operations ops = get_operations() diff --git a/agent/sdk/persistence/sqlite_repository.py b/agent/sdk/persistence/sqlite_repository.py index 368d4c8..a908f5c 100644 --- a/agent/sdk/persistence/sqlite_repository.py +++ b/agent/sdk/persistence/sqlite_repository.py @@ -32,6 +32,7 @@ def _row_to_project(self, row: dict[str, Any]) -> Project: status=row.get("status", "ACTIVE"), user_paygate_tier=row.get("user_paygate_tier", "PAYGATE_TIER_ONE"), material=row.get("material"), + orientation=row.get("orientation", "VERTICAL"), allow_music=bool(row.get("allow_music", 0)), allow_voice=bool(row.get("allow_voice", 0)), narrator_voice=row.get("narrator_voice"), @@ -189,6 +190,7 @@ async def save_project(self, project: Project) -> None: language=project.language, status=project.status, user_paygate_tier=project.user_paygate_tier, + orientation=project.orientation, allow_music=int(project.allow_music), allow_voice=int(project.allow_voice), ) @@ -203,6 +205,7 @@ async def create_project( user_paygate_tier: str = "PAYGATE_TIER_ONE", id: Optional[str] = None, material: Optional[str] = None, + orientation: str = "VERTICAL", allow_music: bool = False, allow_voice: bool = False, ) -> Project: @@ -214,6 +217,7 @@ async def create_project( user_paygate_tier=user_paygate_tier, id=id, material=material, + orientation=orientation, allow_music=allow_music, allow_voice=allow_voice, ) @@ -358,6 +362,7 @@ async def create_scene( parent_scene_id: Optional[str] = None, chain_type: str = "ROOT", source: str = "root", + narrator_text: Optional[str] = None, ) -> Scene: row = await crud.create_scene( video_id=video_id, @@ -370,6 +375,7 @@ async def create_scene( parent_scene_id=parent_scene_id, chain_type=chain_type, source=source, + narrator_text=narrator_text, ) return self._row_to_scene(row) diff --git a/agent/sdk/services/operations.py b/agent/sdk/services/operations.py index 99c8f83..6db05e8 100644 --- 
a/agent/sdk/services/operations.py +++ b/agent/sdk/services/operations.py @@ -11,8 +11,9 @@ import base64 import json import logging +import re import ssl -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Optional, Awaitable, Callable def _build_continuation_prompt(base_prompt: str) -> str: @@ -31,11 +32,192 @@ def _build_continuation_prompt(base_prompt: str) -> str: ) -def _char_matches(c: dict, name_set: set) -> bool: - """Check if a character matches any name in the set by slug OR display name.""" - slug = c.get("slug") or "" - name = c.get("name", "") - return (slug and slug in name_set) or (name and name in name_set) +def _normalized_name(value: str) -> str: + return slugify((value or "").strip()).lower() + + +def _char_matches(c: dict, name_set: set[str]) -> bool: + """Check if an entity matches by slug/name (case-insensitive, slug-aware).""" + normalized_set = {_normalized_name(str(x)) for x in name_set if str(x).strip()} + slug = _normalized_name(c.get("slug") or "") + name = _normalized_name(c.get("name", "")) + return bool((slug and slug in normalized_set) or (name and name in normalized_set)) + + +def _char_mentioned_in_text(c: dict, text: str) -> bool: + """Fallback matcher when scene.character_names is missing.""" + if not text: + return False + hay = text.lower() + slug = (c.get("slug") or "").strip().lower() + name = (c.get("name") or "").strip().lower() + return bool((slug and slug in hay) or (name and name in hay)) + + +def _with_reference_lock(prompt: str, ref_names: list[str]) -> str: + """When refs exist, force model to follow refs and ignore conflicting appearance text.""" + p = (prompt or "").strip() + if not ref_names: + return p + ref_list = ", ".join(ref_names) + lock = ( + f"STRICT CHARACTER CONSISTENCY MODE for [{ref_list}]. " + "Use provided reference images as the only source for character identity and appearance " + "(face, body, clothing, colors, proportions, style). 
" + "Do NOT redesign or reinterpret character looks. " + "Ignore all conflicting appearance text; prompt text controls only action, camera, environment, and mood." + ) + return f"{lock} {p}".strip() + + +_UNSAFE_ERROR_MARKERS = ( + "public_error_unsafe_generation", + "unsafe_generation", + "unsafe generation", +) + +# Keep this intentionally broad so we can neutralize risky phrases in both EN + VI prompts. +_UNSAFE_TERM_RE = re.compile( + r"\b(" + r"kill|killing|murder|blood|bloody|gore|gory|corpse|dead body|behead|decapitat|" + r"execution|torture|rape|sexual|nude|nudity|suicide|terrorist|" + r"giết|máu|đẫm máu|chặt đầu|hành quyết|tra tấn|cưỡng hiếp|khỏa thân|tự sát|khủng bố" + r")\b", + re.IGNORECASE, +) + + +def _extract_error_text(result: dict) -> str: + if not isinstance(result, dict): + return str(result) + if result.get("error"): + return str(result["error"]) + data = result.get("data", {}) + if isinstance(data, dict): + err = data.get("error") + if isinstance(err, dict): + msg = str(err.get("message") or json.dumps(err)[:240]) + details = err.get("details") + if isinstance(details, list): + for detail in details: + if isinstance(detail, dict): + reason = detail.get("reason") + if reason: + msg = f"{msg} [{reason}]" + break + return msg + if err: + return str(err) + return "" + + +def _is_unsafe_generation_error(result: dict) -> bool: + low = _extract_error_text(result).lower() + return any(marker in low for marker in _UNSAFE_ERROR_MARKERS) + + +def _sanitize_prompt_for_safety(prompt: str) -> str: + raw = " ".join(str(prompt or "").split()) + if not raw: + raw = "Cinematic documentary environment shot with neutral action." + softened = _UNSAFE_TERM_RE.sub("dramatic", raw).strip() + safety_guard = ( + "Family-friendly documentary visual. Non-graphic, non-violent, non-sexual, " + "no explicit injury, no blood, no hate symbols, no real-person likeness. " + "Focus on environment, camera angle, lighting, and neutral action only." 
+ ) + merged = f"{safety_guard} {softened}".strip() + return merged[:1400] + + +async def _run_image_with_safe_fallback( + *, + prompt: str, + context: str, + call_with_prompt: Callable[[str], Awaitable[dict]], +) -> dict: + result = await call_with_prompt(prompt) + if not (_is_error(result) and _is_unsafe_generation_error(result)): + return result + + safe_prompt = _sanitize_prompt_for_safety(prompt) + logger.warning("%s blocked by safety filter, retrying with sanitized prompt", context) + retry = await call_with_prompt(safe_prompt) + if not _is_error(retry): + logger.info("%s recovered after safe-prompt retry", context) + return retry + if _is_unsafe_generation_error(retry): + return { + "error": ( + "Request blocked by Google safety filter [PUBLIC_ERROR_UNSAFE_GENERATION]. " + "Da thu auto-safe prompt 1 lan nhung van bi chan. " + "Hay giam noi dung nhay cam/bao luc/18+ va thu lai." + ) + } + return retry + + +async def _resolve_scene_ref_media_ids(scene: dict, project_id: str) -> tuple[list[str], list[str], list[str]]: + """Resolve available reference media_ids for scene entities. + + Rule: + - If a character/entity has uploaded/generated media_id => always use as reference. + - If no media_id => fallback to prompt text (do not block generation). 
+ """ + char_names_raw = scene.get("character_names") + if isinstance(char_names_raw, str): + try: + char_names_raw = json.loads(char_names_raw) + except json.JSONDecodeError: + char_names_raw = [] + if not isinstance(char_names_raw, list): + char_names_raw = [] + + name_set = {_normalized_name(str(x)) for x in char_names_raw if str(x).strip()} + prompt_blob = " ".join( + str(scene.get(k) or "") for k in ("image_prompt", "prompt", "video_prompt", "narrator_text") + ) + + project_chars = await crud.get_project_characters(project_id) + valid_ids: list[str] = [] + ref_names: list[str] = [] + missing_names: list[str] = [] + seen_ids: set[str] = set() + matched_any = False + + for c in project_chars: + matched = _char_matches(c, name_set) if name_set else _char_mentioned_in_text(c, prompt_blob) + if not matched: + continue + matched_any = True + mid = c.get("media_id") + if mid and mid not in seen_ids: + valid_ids.append(mid) + ref_names.append(c.get("name") or c.get("slug") or "entity") + seen_ids.add(mid) + elif not mid and name_set: + missing_names.append(c.get("name") or c.get("slug") or "entity") + + # Hard fallback: if no explicit mapping worked and project has exactly one + # character ref, force using that ref to keep character consistency. 
+ if not matched_any and not valid_ids: + single_ref_chars = [ + c for c in project_chars + if c.get("entity_type") == "character" and c.get("media_id") + ] + if len(single_ref_chars) == 1: + c = single_ref_chars[0] + mid = c.get("media_id") + if mid and mid not in seen_ids: + valid_ids.append(mid) + ref_names.append(c.get("name") or c.get("slug") or "character") + seen_ids.add(mid) + + # De-duplicate while preserving order + if missing_names: + missing_names = list(dict.fromkeys(missing_names)) + + return valid_ids, ref_names, missing_names import aiohttp @@ -162,6 +344,57 @@ async def _poll_operations( return {"error": f"Polling timeout after {timeout}s"} +async def _check_operations_once( + client: FlowClient, + operations: list[dict], + *, + pending_retry_sec: int = 8, +) -> dict: + """Single status-check pass for queue mode. + + Returns: + - {"data": ...} when all operations are SUCCESSFUL + - {"error": "..."} when any operation FAILED + - {"pending": True, "retry_after_sec": N, ...} while still processing + """ + if not operations: + return {"error": "No operations to check"} + + status_result = await client.check_video_status(operations) + if _is_error(status_result): + return status_result + + data = status_result.get("data", status_result) + ops = data.get("operations", []) + if not ops: + return { + "pending": True, + "retry_after_sec": pending_retry_sec, + "message": "Waiting for operation status", + } + + all_done = True + for op in ops: + status = op.get("status", "") + if status == "MEDIA_GENERATION_STATUS_SUCCESSFUL": + continue + if status == "MEDIA_GENERATION_STATUS_FAILED": + op_name = op.get("operation", {}).get("name", "?") + return {"error": f"Operation failed: {op_name}"} + all_done = False + + if all_done: + return {"data": data} + + done_count = sum(1 for o in ops if o.get("status") == "MEDIA_GENERATION_STATUS_SUCCESSFUL") + return { + "pending": True, + "retry_after_sec": pending_retry_sec, + "message": f"Video generation in progress 
({done_count}/{len(ops)} done)", + "data": {"operations": ops}, + } + + class OperationService: """Executes media generation operations using FlowClient + Repository. @@ -190,40 +423,30 @@ async def generate_scene_image(self, scene: dict, orientation: str) -> dict: # Resolve character reference media_ids char_media_ids = None - char_names_raw = scene.get("character_names") - if char_names_raw and pid: - if isinstance(char_names_raw, str): - try: - char_names_raw = json.loads(char_names_raw) - except json.JSONDecodeError: - char_names_raw = [] - if not isinstance(char_names_raw, list): - char_names_raw = [] - if char_names_raw: - project_chars = await crud.get_project_characters(pid) - valid_ids = [] - missing_refs = [] - char_names_set = set(char_names_raw) - for c in project_chars: - if not _char_matches(c, char_names_set): - continue - mid = c.get("media_id") - if mid: - valid_ids.append(mid) - else: - missing_refs.append(c.get("slug") or c["name"]) - - if missing_refs: - return {"error": f"Waiting for reference images: {', '.join(missing_refs)}"} - - char_media_ids = valid_ids if valid_ids else None - if char_media_ids: - logger.info("Scene %s: using %d reference images", - scene.get("id", "?")[:8], len(char_media_ids)) - - return await self._client.generate_images( - prompt=prompt, project_id=pid, aspect_ratio=aspect, - user_paygate_tier=tier, character_media_ids=char_media_ids, + if pid: + valid_ids, ref_names, missing_names = await _resolve_scene_ref_media_ids(scene, pid) + if missing_names: + return {"error": f"Missing reference images for: {', '.join(missing_names)}"} + char_media_ids = valid_ids if valid_ids else None + if char_media_ids: + prompt = _with_reference_lock(prompt, ref_names) + logger.info( + "Scene %s: using %d uploaded refs [%s]", + scene.get("id", "?")[:8], + len(char_media_ids), + ", ".join(ref_names[:4]), + ) + + return await _run_image_with_safe_fallback( + prompt=prompt, + context=f"Scene image {scene.get('id', '?')[:8]}", + 
call_with_prompt=lambda p: self._client.generate_images( + prompt=p, + project_id=pid, + aspect_ratio=aspect, + user_paygate_tier=tier, + character_media_ids=char_media_ids, + ), ) async def edit_scene_image(self, scene: dict, orientation: str, @@ -259,27 +482,25 @@ async def edit_scene_image(self, scene: dict, orientation: str, # Resolve character reference media_ids for edit consistency char_media_ids = None - char_names_raw = scene.get("character_names") - if char_names_raw and pid: - if isinstance(char_names_raw, str): - try: - char_names_raw = json.loads(char_names_raw) - except json.JSONDecodeError: - char_names_raw = [] - if isinstance(char_names_raw, list) and char_names_raw: - project_chars = await crud.get_project_characters(pid) - valid_ids = [] - char_names_set = set(char_names_raw) - for c in project_chars: - if _char_matches(c, char_names_set) and c.get("media_id"): - valid_ids.append(c["media_id"]) - char_media_ids = valid_ids if valid_ids else None - - return await self._client.edit_image( - prompt=edit_prompt, source_media_id=src, - project_id=pid, aspect_ratio=aspect, - user_paygate_tier=tier, - character_media_ids=char_media_ids, + if pid: + valid_ids, ref_names, missing_names = await _resolve_scene_ref_media_ids(scene, pid) + if missing_names: + return {"error": f"Missing reference images for: {', '.join(missing_names)}"} + char_media_ids = valid_ids if valid_ids else None + if char_media_ids: + edit_prompt = _with_reference_lock(edit_prompt, ref_names) + + return await _run_image_with_safe_fallback( + prompt=edit_prompt, + context=f"Scene edit {scene.get('id', '?')[:8]}", + call_with_prompt=lambda p: self._client.edit_image( + prompt=p, + source_media_id=src, + project_id=pid, + aspect_ratio=aspect, + user_paygate_tier=tier, + character_media_ids=char_media_ids, + ), ) # ------------------------------------------------------------------ @@ -313,9 +534,13 @@ async def generate_scene_video(self, scene: dict, orientation: str, req_row = await 
crud.get_request(request_id) existing_op = req_row.get("request_id") if req_row else None + queue_mode = bool(request_id) + if existing_op: logger.info("Video gen already submitted (op=%s), re-polling", existing_op[:30]) operations = [{"operation": {"name": existing_op}, "status": "MEDIA_GENERATION_STATUS_PENDING"}] + if queue_mode: + return await _check_operations_once(self._client, operations, pending_retry_sec=max(8, VIDEO_POLL_INTERVAL)) return await _poll_operations(self._client, operations) submit_result = await self._client.generate_video( @@ -346,6 +571,14 @@ async def generate_scene_video(self, scene: dict, orientation: str, if status == "MEDIA_GENERATION_STATUS_FAILED": return {"error": "Video generation failed immediately"} + if queue_mode: + return { + "pending": True, + "retry_after_sec": max(8, VIDEO_POLL_INTERVAL), + "message": "Video submitted. Waiting for completion.", + "data": {"operations": operations}, + } + logger.info("Video gen submitted, polling %d operations...", len(operations)) return await _poll_operations(self._client, operations) @@ -425,9 +658,13 @@ async def generate_scene_video_refs(self, scene: dict, orientation: str, req_row = await crud.get_request(request_id) existing_op = req_row.get("request_id") if req_row else None + queue_mode = bool(request_id) + if existing_op: logger.info("R2V already submitted (op=%s), re-polling", existing_op[:30]) operations = [{"operation": {"name": existing_op}, "status": "MEDIA_GENERATION_STATUS_PENDING"}] + if queue_mode: + return await _check_operations_once(self._client, operations, pending_retry_sec=max(8, VIDEO_POLL_INTERVAL)) return await _poll_operations(self._client, operations) submit_result = await self._client.generate_video_from_references( @@ -457,6 +694,14 @@ async def generate_scene_video_refs(self, scene: dict, orientation: str, if status == "MEDIA_GENERATION_STATUS_FAILED": return {"error": "R2V failed immediately"} + if queue_mode: + return { + "pending": True, + 
"retry_after_sec": max(8, VIDEO_POLL_INTERVAL), + "message": "R2V submitted. Waiting for completion.", + "data": {"operations": operations}, + } + logger.info("R2V submitted with %d refs, polling %d operations...", len(ref_ids), len(operations)) return await _poll_operations(self._client, operations) @@ -480,10 +725,14 @@ async def upscale_scene_video(self, scene: dict, orientation: str, req_row = await crud.get_request(request_id) existing_op = req_row.get("request_id") if req_row else None + queue_mode = bool(request_id) + if existing_op: # Already submitted — just re-poll logger.info("Upscale already submitted (op=%s), re-polling", existing_op[:30]) operations = [{"operation": {"name": existing_op}, "status": "MEDIA_GENERATION_STATUS_PENDING"}] + if queue_mode: + return await _check_operations_once(self._client, operations, pending_retry_sec=max(8, VIDEO_POLL_INTERVAL)) return await _poll_operations(self._client, operations, timeout=300) submit_result = await self._client.upscale_video( @@ -524,6 +773,14 @@ async def upscale_scene_video(self, scene: dict, orientation: str, if status == "MEDIA_GENERATION_STATUS_FAILED": return {"error": "Upscale failed immediately"} + if queue_mode: + return { + "pending": True, + "retry_after_sec": max(8, VIDEO_POLL_INTERVAL), + "message": "Upscale submitted. Waiting for completion.", + "data": {"operations": operations}, + } + logger.info("Upscale submitted, polling %d operations...", len(operations)) poll_result = await _poll_operations(self._client, operations, timeout=300) @@ -552,6 +809,14 @@ async def generate_reference_image(self, char: dict, project_id: str) -> dict: entity_type = char.get("entity_type", "character") pid = project_id + # Idempotent path: if this entity already has a media_id, keep it. + # REGENERATE_CHARACTER_IMAGE clears media_id before calling this method. 
+ existing_mid = char.get("media_id") + if existing_mid: + logger.info("%s '%s' already has media_id=%s, skip generate_reference_image", + entity_type, char.get("name", "?"), str(existing_mid)[:20]) + return {"data": {"media": [{"name": existing_mid}]}} + # Fast path: image already generated, just need upload for UUID existing_url = char.get("reference_image_url") if existing_url and not char.get("media_id"): @@ -584,9 +849,15 @@ async def generate_reference_image(self, char: dict, project_id: str) -> dict: tier = project.get("user_paygate_tier", "PAYGATE_TIER_TWO") if project else "PAYGATE_TIER_TWO" aspect = _reference_aspect_ratio(entity_type) - result = await self._client.generate_images( - prompt=prompt, project_id=pid, aspect_ratio=aspect, - user_paygate_tier=tier, + result = await _run_image_with_safe_fallback( + prompt=prompt, + context=f"Ref image {char.get('id', '?')[:8]}", + call_with_prompt=lambda p: self._client.generate_images( + prompt=p, + project_id=pid, + aspect_ratio=aspect, + user_paygate_tier=tier, + ), ) if not _is_error(result): @@ -682,10 +953,10 @@ async def queue_scene_video_refs(self, scene_id: str, project_id: str, async def queue_upscale_video(self, scene_id: str, project_id: str, video_id: str, orientation: str | None = None) -> str: - """Queue an UPSCALE_VIDEO request. Returns request id.""" + """Queue a local upscale request. 
Returns request id.""" orientation = await self._resolve_queue_orientation(video_id, orientation) row = await crud.create_request( - req_type="UPSCALE_VIDEO", orientation=orientation, + req_type="UPSCALE_VIDEO_LOCAL", orientation=orientation, scene_id=scene_id, project_id=project_id, video_id=video_id, ) return row["id"] diff --git a/agent/sdk/services/result_handler.py b/agent/sdk/services/result_handler.py index 23d40aa..e0e8150 100644 --- a/agent/sdk/services/result_handler.py +++ b/agent/sdk/services/result_handler.py @@ -1,14 +1,255 @@ """Shared result parsing + DB update helpers for SDK direct execution and background processor.""" from __future__ import annotations +import logging +from pathlib import Path from typing import TYPE_CHECKING +from urllib.parse import quote, urlparse +import aiohttp + +from agent.config import API_HOST, API_PORT, OUTPUT_DIR from agent.db import crud -from agent.worker._parsing import _is_error, _extract_media_id, _extract_output_url +from agent.utils.paths import scene_filename +from agent.utils.slugify import slugify +from agent.worker._parsing import ( + _extract_media_id, + _extract_output_url, + _is_direct_media_url, + _is_error, +) if TYPE_CHECKING: from agent.sdk.models.media import GenerationResult +logger = logging.getLogger(__name__) + +_API_PUBLIC_HOST = "127.0.0.1" if API_HOST in {"0.0.0.0", "::"} else API_HOST +_LOCAL_MEDIA_PROXY_BASE = f"http://{_API_PUBLIC_HOST}:{API_PORT}/api/flow/local-media" +_IMAGE_EXT_BY_MIME = { + "image/jpeg": "jpg", + "image/jpg": "jpg", + "image/png": "png", + "image/webp": "webp", + "image/gif": "gif", + "image/bmp": "bmp", + "image/avif": "avif", +} +_VIDEO_EXT_BY_MIME = { + "video/mp4": "mp4", + "video/webm": "webm", + "video/quicktime": "mov", + "video/x-matroska": "mkv", +} + + +def _build_local_media_proxy_url(path: Path) -> str: + encoded = quote(str(path), safe="") + return f"{_LOCAL_MEDIA_PROXY_BASE}?path={encoded}" + + +def _is_persistable_local_url(url: str | None) -> bool: + if 
not isinstance(url, str): + return False + text = url.strip() + if not text: + return False + if text.startswith("http://") or text.startswith("https://"): + try: + parsed = urlparse(text) + if (parsed.hostname or "").lower() not in {"127.0.0.1", "localhost"}: + return False + return parsed.path.rstrip("/") == "/api/flow/local-media" + except Exception: + return False + if text.startswith("file://"): + return True + return Path(text).is_absolute() + + +def _guess_media_ext(url: str, content_type: str | None, kind: str) -> str: + if content_type: + mime = content_type.split(";", 1)[0].strip().lower() + if kind == "video" and mime in _VIDEO_EXT_BY_MIME: + return _VIDEO_EXT_BY_MIME[mime] + if kind == "image" and mime in _IMAGE_EXT_BY_MIME: + return _IMAGE_EXT_BY_MIME[mime] + try: + parsed = urlparse(url) + suffix = Path(parsed.path).suffix.lower().lstrip(".") + if kind == "video" and suffix in {"mp4", "webm", "mov", "mkv"}: + return suffix + if kind == "image" and suffix in {"jpg", "jpeg", "png", "webp", "gif", "bmp", "avif"}: + return "jpg" if suffix == "jpeg" else suffix + except Exception: + pass + return "mp4" if kind == "video" else "png" + + +async def _download_media(url: str, target_base_path: Path, kind: str = "image") -> Path | None: + target_base_path.parent.mkdir(parents=True, exist_ok=True) + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=420 if kind == "video" else 180) + async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(url) as resp: + if resp.status != 200: + return None + ext = _guess_media_ext(url, resp.headers.get("content-type"), kind) + target_path = target_base_path.with_suffix(f".{ext}") + target_path.write_bytes(await resp.read()) + return target_path + except Exception as exc: + logger.warning("Failed downloading local %s copy from %s: %s", kind, url, exc) + return None + + +async def _resolve_scene_local_image_url(scene_id: str, orientation: 
str, remote_url: str | None) -> str | None: + if not _is_direct_media_url(remote_url): + return None + scene = await crud.get_scene(scene_id) + if not scene: + return None + video = await crud.get_video(scene.get("video_id")) if scene.get("video_id") else None + project = await crud.get_project(video.get("project_id")) if video and video.get("project_id") else None + + project_seed = ( + (project or {}).get("name") + or (video or {}).get("project_id") + or scene.get("video_id") + or "project" + ) + project_slug = slugify(str(project_seed)) or "project" + axis = "vertical" if orientation == "VERTICAL" else "horizontal" + display_order = int(scene.get("display_order") or 0) + 1 + canonical_name = scene_filename(display_order, scene_id, ext="png") + local_path = OUTPUT_DIR / project_slug / "images" / axis / canonical_name + + downloaded = await _download_media(remote_url, local_path, "image") + if not downloaded: + return None + return _build_local_media_proxy_url(downloaded) + + +async def _resolve_scene_local_video_url( + scene_id: str, + orientation: str, + remote_url: str | None, + *, + kind: str = "video", +) -> str | None: + if not _is_direct_media_url(remote_url): + return None + scene = await crud.get_scene(scene_id) + if not scene: + return None + video = await crud.get_video(scene.get("video_id")) if scene.get("video_id") else None + project = await crud.get_project(video.get("project_id")) if video and video.get("project_id") else None + + project_seed = ( + (project or {}).get("name") + or (video or {}).get("project_id") + or scene.get("video_id") + or "project" + ) + project_slug = slugify(str(project_seed)) or "project" + axis = "vertical" if orientation == "VERTICAL" else "horizontal" + display_order = int(scene.get("display_order") or 0) + 1 + canonical_name = scene_filename(display_order, scene_id, ext="mp4") + subdir = "videos" if kind == "video" else "upscale" + local_path = OUTPUT_DIR / project_slug / subdir / axis / canonical_name + + downloaded = 
await _download_media(remote_url, local_path, "video") + if not downloaded: + return None + return _build_local_media_proxy_url(downloaded) + + +async def _resolve_character_local_image_url(character_id: str, remote_url: str | None) -> str | None: + if not _is_direct_media_url(remote_url): + return None + char = await crud.get_character(character_id) + slug = slugify(str((char or {}).get("name") or "character")) or "character" + local_path = OUTPUT_DIR / "_shared" / "refs" / f"{slug}_{character_id}.png" + downloaded = await _download_media(remote_url, local_path, "image") + if not downloaded: + return None + return _build_local_media_proxy_url(downloaded) + + +def _extract_first_direct_url(payload: object) -> str | None: + candidates: list[str] = [] + + def walk(node: object) -> None: + if isinstance(node, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = node.get(key) + if isinstance(value, str): + candidates.append(value) + for value in node.values(): + walk(value) + elif isinstance(node, list): + for value in node: + walk(value) + + walk(payload) + for url in candidates: + if _is_direct_media_url(url): + return url + return None + + +async def _fallback_local_cache_via_media_id( + media_id: str | None, + remote_url_hint: str | None, + *, + project_id: str | None = None, +) -> str | None: + if not media_id: + return None + try: + from agent.services.flow_client import get_flow_client + + client = get_flow_client() + normalized_project_id = str(project_id or "").strip().lower() or None + + if _is_direct_media_url(remote_url_hint): + cached = await client.cache_media_locally( + media_id, + remote_url_hint, + project_id=normalized_project_id, + ) + if cached: + return cached + + if not client.connected: + return None + + media_resp = await client.get_media( + media_id, + project_id=normalized_project_id, + timeout_sec=18, + ) + if media_resp.get("error"): + return None + status = media_resp.get("status", 200) + if 
isinstance(status, int) and status >= 400: + return None + + payload = media_resp.get("data", media_resp) + fresh_url = _extract_first_direct_url(payload) + if not _is_direct_media_url(fresh_url): + return None + cached = await client.cache_media_locally( + media_id, + fresh_url, + project_id=normalized_project_id, + ) + return cached or fresh_url + except Exception as exc: + logger.debug("Fallback local cache failed for media %s: %s", str(media_id)[:12], exc) + return None + def parse_result(raw: dict, req_type: str) -> GenerationResult: """Parse a raw FlowClient/OperationService response into a GenerationResult.""" @@ -45,12 +286,34 @@ async def apply_scene_result( return p = "vertical" if orientation == "VERTICAL" else "horizontal" + direct_url = result.url if _is_direct_media_url(result.url) else None + persisted_url = result.url if _is_persistable_local_url(result.url) else direct_url updates = {} + scene_cache: dict | None = None + project_id_cache: str | None = None + + async def _scene_project_id() -> str | None: + nonlocal scene_cache, project_id_cache + if project_id_cache is not None: + return project_id_cache or None + scene_cache = scene_cache or await crud.get_scene(scene_id) + video = await crud.get_video(scene_cache.get("video_id")) if scene_cache and scene_cache.get("video_id") else None + project_id_cache = str((video or {}).get("project_id") or "").strip() + return project_id_cache or None if req_type in ("GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE"): + local_proxy_url = await _resolve_scene_local_image_url(scene_id, orientation, direct_url) + if (not local_proxy_url) and result.media_id: + local_proxy_url = await _fallback_local_cache_via_media_id( + result.media_id, + direct_url, + project_id=await _scene_project_id(), + ) + if local_proxy_url: + persisted_url = local_proxy_url updates.update({ f"{p}_image_media_id": result.media_id, - f"{p}_image_url": result.url, + f"{p}_image_url": persisted_url, f"{p}_image_status": "COMPLETED", # 
Cascade: clear downstream f"{p}_video_media_id": None, f"{p}_video_url": None, f"{p}_video_status": "PENDING", @@ -58,24 +321,42 @@ async def apply_scene_result( }) # Chain cascade: update parent's end_scene_media_id so its video # transitions to this child's new image - scene = await crud.get_scene(scene_id) + scene = scene_cache or await crud.get_scene(scene_id) if scene and scene.get("parent_scene_id") and result.media_id: await crud.update_scene( scene["parent_scene_id"], **{f"{p}_end_scene_media_id": result.media_id}, ) elif req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"): + local_proxy_url = await _resolve_scene_local_video_url(scene_id, orientation, direct_url, kind="video") + if (not local_proxy_url) and result.media_id: + local_proxy_url = await _fallback_local_cache_via_media_id( + result.media_id, + direct_url, + project_id=await _scene_project_id(), + ) + if local_proxy_url: + persisted_url = local_proxy_url updates.update({ f"{p}_video_media_id": result.media_id, - f"{p}_video_url": result.url, + f"{p}_video_url": persisted_url, f"{p}_video_status": "COMPLETED", # Cascade: clear upscale f"{p}_upscale_media_id": None, f"{p}_upscale_url": None, f"{p}_upscale_status": "PENDING", }) - elif req_type == "UPSCALE_VIDEO": + elif req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): + local_proxy_url = await _resolve_scene_local_video_url(scene_id, orientation, direct_url, kind="upscale") + if (not local_proxy_url) and result.media_id: + local_proxy_url = await _fallback_local_cache_via_media_id( + result.media_id, + direct_url, + project_id=await _scene_project_id(), + ) + if local_proxy_url: + persisted_url = local_proxy_url updates.update({ f"{p}_upscale_media_id": result.media_id, - f"{p}_upscale_url": result.url, + f"{p}_upscale_url": persisted_url, f"{p}_upscale_status": "COMPLETED", }) @@ -93,7 +374,17 @@ async def apply_character_result( updates = {} if result.media_id: updates["media_id"] = result.media_id - if result.url: - 
updates["reference_image_url"] = result.url + direct_url = result.url if _is_direct_media_url(result.url) else None + local_ref: str | None = None + if direct_url: + local_ref = await _resolve_character_local_image_url(character_id, direct_url) + if (not local_ref) and result.media_id: + local_ref = await _fallback_local_cache_via_media_id( + result.media_id, + direct_url, + project_id=None, + ) + if local_ref or direct_url: + updates["reference_image_url"] = local_ref or direct_url if updates: await crud.update_character(character_id, **updates) diff --git a/agent/services/flow_client.py b/agent/services/flow_client.py index 8b7a78b..91a3f50 100644 --- a/agent/services/flow_client.py +++ b/agent/services/flow_client.py @@ -7,17 +7,448 @@ import asyncio import json import logging +import re +import shutil import time import uuid +from collections import deque +from pathlib import Path from typing import Optional +from urllib.parse import quote +from urllib.parse import parse_qs, unquote, urlparse + +import aiohttp from agent.config import ( GOOGLE_FLOW_API, GOOGLE_API_KEY, ENDPOINTS, VIDEO_MODELS, UPSCALE_MODELS, IMAGE_MODELS, VIDEO_POLL_TIMEOUT, + API_HOST, API_PORT, OUTPUT_DIR, + FLOW_CREDITS_CACHE_TTL_SEC, FLOW_CREDITS_ERROR_TTL_SEC, + TIER_SYNC_MIN_INTERVAL_SEC, ) -from agent.services.headers import random_headers +from agent.utils.paths import scene_filename +from agent.utils.slugify import slugify +from agent.services.headers import random_headers, read_headers logger = logging.getLogger(__name__) +_UNICODE_ESCAPE_RE = re.compile(r"\\u([0-9a-fA-F]{4})") + + +def _decode_escaped_text(raw: str) -> str: + """Decode common JSON-escaped URL text emitted by Flow payloads.""" + if not isinstance(raw, str): + return "" + text = raw.replace("\\/", "/") + return _UNICODE_ESCAPE_RE.sub(lambda m: chr(int(m.group(1), 16)), text) + + +def _declared_aspect_in_model_key(model_key: str) -> str | None: + """Infer declared aspect from model key naming (portrait/landscape).""" + 
low = (model_key or "").lower() + has_portrait = "portrait" in low + has_landscape = "landscape" in low + if has_portrait and not has_landscape: + return "portrait" + if has_landscape and not has_portrait: + return "landscape" + return None + + +def _wanted_aspect_name(aspect_ratio: str) -> str: + return "portrait" if str(aspect_ratio or "").upper().endswith("PORTRAIT") else "landscape" + + +def _model_matches_aspect(model_key: str, aspect_ratio: str) -> bool: + declared = _declared_aspect_in_model_key(model_key) + if not declared: + return True + return declared == _wanted_aspect_name(aspect_ratio) + + +def _tier_for_model_key(model_key: str, default_tier: str) -> str: + """Infer required paygate tier from model key naming.""" + low = (model_key or "").lower() + # Ultra-relaxed models are PAYGATE_TIER_TWO in current Flow naming. + if "ultra_relaxed" in low: + return "PAYGATE_TIER_TWO" + # Fast portrait/landscape variants map to tier one configs. + if "fast" in low: + return "PAYGATE_TIER_ONE" + return default_tier + + +def _extract_error_text(result: dict) -> str: + """Extract normalized error text from extension/API response.""" + if not isinstance(result, dict): + return str(result) + error = result.get("error") + if error: + return str(error) + data = result.get("data", {}) + if isinstance(data, dict): + err = data.get("error") + if isinstance(err, dict): + msg = str(err.get("message") or json.dumps(err)[:240]) + details = err.get("details") + if isinstance(details, list): + for detail in details: + if isinstance(detail, dict) and detail.get("reason"): + msg = f"{msg} [{detail['reason']}]" + break + return msg + if err: + return str(err) + return "" + + +def _is_internal_error_text(text: str | None) -> bool: + low = (text or "").lower() + return "internal error encountered" in low or "internal error" in low + + +def _is_signed_url_expired(url: str | None) -> bool: + if not isinstance(url, str) or not url.startswith("http"): + return False + try: + parsed = 
urlparse(url) + qs = parse_qs(parsed.query) + raw = (qs.get("Expires") or qs.get("expires") or [None])[0] + if raw is None: + return False + expires_ts = int(raw) + return expires_ts <= int(time.time()) + except Exception: + return False + + +def _is_direct_media_url(url: str | None) -> bool: + """Return True for renderer-safe media URLs we can persist to DB. + + Rejects trpc redirect URLs (media.getMediaUrlRedirect) which often become + unusable across sessions. + """ + if not isinstance(url, str): + return False + text = url.strip() + if not text.startswith("http"): + return False + low = text.lower() + if "media.getmediaurlredirect" in low: + return False + if low.startswith("https://flow-content.google/"): + return True + if low.startswith("https://storage.googleapis.com/"): + return True + if "googleusercontent.com/" in low: + return True + return False + + +def _extract_local_media_path(url: str | None) -> Path | None: + """Extract local media file path from absolute path / file:// / local-media proxy URL.""" + if not isinstance(url, str): + return None + text = url.strip() + if not text: + return None + + if text.startswith("http://") or text.startswith("https://"): + try: + parsed = urlparse(text) + host = (parsed.hostname or "").lower() + if host not in ("127.0.0.1", "localhost"): + return None + if parsed.path.rstrip("/") != "/api/flow/local-media": + return None + raw_path = (parse_qs(parsed.query).get("path") or [None])[0] + if not isinstance(raw_path, str) or not raw_path.strip(): + return None + candidate = Path(unquote(raw_path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + if text.startswith("file://"): + try: + parsed = urlparse(text) + candidate = Path(unquote(parsed.path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + candidate = Path(text).expanduser() + return candidate if candidate.is_absolute() else None + + +def 
_has_local_media_file(url: str | None) -> bool: + path = _extract_local_media_path(url) + if not path: + return False + try: + return path.exists() and path.is_file() + except Exception: + return False + + +def _scene_slot_meta(url_field: str) -> tuple[str, str, str] | None: + if url_field.startswith("vertical_"): + axis = "vertical" + elif url_field.startswith("horizontal_"): + axis = "horizontal" + else: + return None + if "_image_" in url_field: + return "image", axis, "png" + if "_upscale_" in url_field: + return "upscale", axis, "mp4" + if "_video_" in url_field: + return "video", axis, "mp4" + return None + + +def _canonical_scene_media_base_path( + *, + project_slug: str, + scene_id: str, + display_order: int, + kind: str, + axis: str, +) -> Path: + ext = "png" if kind == "image" else "mp4" + subdir = "images" if kind == "image" else ("upscale" if kind == "upscale" else "videos") + filename = scene_filename(display_order + 1, scene_id, ext=ext) + return OUTPUT_DIR / project_slug / subdir / axis / filename + + +def _find_scene_canonical_file( + *, + project_slug: str, + scene_id: str, + display_order: int, + kind: str, + axis: str, +) -> Path | None: + base = _canonical_scene_media_base_path( + project_slug=project_slug, + scene_id=scene_id, + display_order=display_order, + kind=kind, + axis=axis, + ) + if base.exists() and base.is_file(): + return base + for candidate in sorted(base.parent.glob(f"{base.stem}.*")): + if candidate.is_file(): + return candidate + return None + + +def _find_scene_media_file_any_project( + *, + scene_id: str, + kind: str, + axis: str, +) -> Path | None: + """Fallback local lookup when project slug changed after media was downloaded.""" + subdir = "images" if kind == "image" else ("upscale" if kind == "upscale" else "videos") + pattern = f"scene_*_{scene_id}.*" + try: + for project_dir in sorted(OUTPUT_DIR.iterdir()): + if not project_dir.is_dir(): + continue + media_dir = project_dir / subdir / axis + if not media_dir.exists() or 
not media_dir.is_dir(): + continue + matches = [p for p in sorted(media_dir.glob(pattern)) if p.is_file()] + if matches: + return matches[0] + except Exception: + return None + return None + + +def _find_character_ref_any_slug(character_id: str) -> Path | None: + refs_dir = OUTPUT_DIR / "_shared" / "refs" + if not refs_dir.exists() or not refs_dir.is_dir(): + return None + try: + matches = [p for p in sorted(refs_dir.glob(f"*_{character_id}.*")) if p.is_file()] + if matches: + return matches[0] + except Exception: + return None + return None + + +_API_PUBLIC_HOST = "127.0.0.1" if API_HOST in {"0.0.0.0", "::"} else API_HOST +_LOCAL_MEDIA_PROXY_BASE = f"http://{_API_PUBLIC_HOST}:{API_PORT}/api/flow/local-media" +_IMAGE_EXT_BY_MIME = { + "image/jpeg": "jpg", + "image/jpg": "jpg", + "image/png": "png", + "image/webp": "webp", + "image/gif": "gif", + "image/bmp": "bmp", + "image/avif": "avif", +} +_VIDEO_EXT_BY_MIME = { + "video/mp4": "mp4", + "video/quicktime": "mov", + "video/webm": "webm", + "video/x-matroska": "mkv", +} + + +def _build_local_media_proxy_url(path: Path) -> str: + return f"{_LOCAL_MEDIA_PROXY_BASE}?path={quote(str(path), safe='')}" + + +def _guess_media_ext(url: str, content_type: str | None, kind: str) -> str: + if content_type: + mime = content_type.split(";", 1)[0].strip().lower() + if kind == "video" and mime in _VIDEO_EXT_BY_MIME: + return _VIDEO_EXT_BY_MIME[mime] + if kind == "image" and mime in _IMAGE_EXT_BY_MIME: + return _IMAGE_EXT_BY_MIME[mime] + try: + parsed = urlparse(url) + suffix = Path(parsed.path).suffix.lower().lstrip(".") + if kind == "video" and suffix in {"mp4", "mov", "webm", "mkv"}: + return suffix + if kind == "image" and suffix in {"jpg", "jpeg", "png", "webp", "gif", "bmp", "avif"}: + return "jpg" if suffix == "jpeg" else suffix + except Exception: + pass + return "mp4" if kind == "video" else "png" + + +async def _download_remote_media(url: str, target_base_path: Path, kind: str) -> Path | None: + if not 
_is_direct_media_url(url): + return None + target_base_path.parent.mkdir(parents=True, exist_ok=True) + timeout_sec = 420 if kind == "video" else 180 + try: + connector = aiohttp.TCPConnector(ssl=False) + timeout = aiohttp.ClientTimeout(total=timeout_sec) + async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: + async with session.get(url) as resp: + if resp.status != 200: + return None + ext = _guess_media_ext(url, resp.headers.get("content-type"), kind) + target_path = target_base_path.with_suffix(f".{ext}") + tmp_path = target_path.with_suffix(f"{target_path.suffix}.tmp") + tmp_path.write_bytes(await resp.read()) + tmp_path.replace(target_path) + return target_path + except Exception as exc: + logger.debug("Failed downloading local media copy from %s: %s", url, exc) + return None + + +def _is_model_access_denied(result: dict) -> bool: + text = _extract_error_text(result).lower() + return ( + "public_error_model_access_denied" in text + or "model_access_denied" in text + or "does not have permission" in text + ) + + +def _model_key_variants(model_key: str) -> list[str]: + """Generate safe fallback variants for known Flow naming patterns.""" + low = (model_key or "").lower() + variants: list[str] = [] + if not low: + return variants + # Some accounts only allow *_ultra_relaxed variants for portrait/landscape. 
+ if "ultra_relaxed" not in low and ("_portrait" in low or "_landscape" in low): + variants.append(f"{model_key}_ultra_relaxed") + return variants + + +def _resolve_video_model_candidates( + *, + user_paygate_tier: str, + gen_type: str, + aspect_ratio: str, + requested_model_key: str | None = None, +) -> list[tuple[str, str, str]]: + """Return ordered, deduplicated candidates: (source, model_key, context_tier).""" + candidates: list[tuple[str, str, str]] = [] + if requested_model_key: + candidates.append( + ("requested", requested_model_key, _tier_for_model_key(requested_model_key, user_paygate_tier)) + ) + + tier_order = [user_paygate_tier] + for tier in ("PAYGATE_TIER_TWO", "PAYGATE_TIER_ONE"): + if tier not in tier_order: + tier_order.append(tier) + + for tier in tier_order: + key = VIDEO_MODELS.get(tier, {}).get(gen_type, {}).get(aspect_ratio) + if key: + candidates.append((f"{tier}:{gen_type}:{aspect_ratio}", key, _tier_for_model_key(key, tier))) + + # Heuristic variants (append at the end, keep priority for explicit config). + base_candidates = list(candidates) + for source, key, tier in base_candidates: + for variant in _model_key_variants(key): + candidates.append((f"{source}:variant", variant, _tier_for_model_key(variant, tier))) + + seen: set[str] = set() + deduped: list[tuple[str, str, str]] = [] + for source, key, tier in candidates: + if key and key not in seen: + seen.add(key) + deduped.append((source, key, tier)) + + matched = [c for c in deduped if _model_matches_aspect(c[1], aspect_ratio)] + return matched if matched else deduped + + +def _resolve_video_model_key( + *, + user_paygate_tier: str, + gen_type: str, + aspect_ratio: str, + requested_model_key: str | None = None, +) -> tuple[str | None, str]: + """Pick an aspect-compatible video model key. + + Priority: + 1) explicitly requested key (if aspect-compatible), + 2) configured key for current tier + requested ratio, + 3) compatible fallback from known tiers for same gen_type + ratio. 
+ """ + candidates = _resolve_video_model_candidates( + user_paygate_tier=user_paygate_tier, + gen_type=gen_type, + aspect_ratio=aspect_ratio, + requested_model_key=requested_model_key, + ) + + if candidates: + source, key, resolved_tier = candidates[0] + if requested_model_key and source != "requested": + logger.warning( + "Requested model '%s' mismatches %s. Falling back to '%s' from %s", + requested_model_key, + aspect_ratio, + key, + source, + ) + if not _model_matches_aspect(key, aspect_ratio): + logger.warning( + "No aspect-compatible model found for %s %s %s; using '%s' from %s", + user_paygate_tier, + gen_type, + aspect_ratio, + key, + source, + ) + return key, resolved_tier + return None, user_paygate_tier class FlowClient: @@ -25,36 +456,116 @@ class FlowClient: def __init__(self): self._extension_ws = None # Set by WS server when extension connects + self._extension_ws_pool: set = set() + self._extension_ws_order: dict[object, int] = {} + self._extension_ws_seq = 0 self._pending: dict[str, asyncio.Future] = {} self._flow_key: Optional[str] = None + self._sync_in_progress = False + self._sync_task: asyncio.Task | None = None + self._last_tier_sync_at: float = 0.0 + self._last_tier_sync_flow_key: str = "" + self._tier_sync_min_interval_sec = float(TIER_SYNC_MIN_INTERVAL_SEC) + self._credits_cache: dict | None = None + self._credits_cached_at: float = 0.0 + self._credits_cache_flow_key: str = "" + self._credits_inflight: asyncio.Task | None = None # WS stats self._ws_connect_count = 0 self._ws_disconnect_count = 0 self._ws_connected_at: Optional[float] = None self._ws_last_disconnect_at: Optional[float] = None + # Guard against pull_project_urls spam when UI retries many broken media at once. + self._project_pull_cooldown_until: dict[str, float] = {} def set_extension(self, ws): """Called when extension connects via WS.""" + # Keep a pool because MV3 service workers can reconnect frequently. + # Forcing close on older sockets can create reconnect storms. 
+ self._extension_ws_pool.add(ws) + self._extension_ws_seq += 1 + self._extension_ws_order[ws] = self._extension_ws_seq self._extension_ws = ws self._ws_connect_count += 1 self._ws_connected_at = time.time() logger.info("Extension connected #%d (waiting for extension_ready/token_captured to sync)", self._ws_connect_count) - def clear_extension(self): - """Called when extension disconnects.""" - self._extension_ws = None + def _choose_live_ws(self): + """Pick any live WS from pool (latest available).""" + ordered = sorted( + list(self._extension_ws_pool), + key=lambda item: self._extension_ws_order.get(item, 0), + reverse=True, + ) + for ws in ordered: + if getattr(ws, "closed", False): + self._extension_ws_pool.discard(ws) + self._extension_ws_order.pop(ws, None) + continue + return ws + return None + + def _live_ws_candidates(self) -> list[object]: + ordered = sorted( + list(self._extension_ws_pool), + key=lambda item: self._extension_ws_order.get(item, 0), + reverse=True, + ) + live: list[object] = [] + for ws in ordered: + if getattr(ws, "closed", False): + self._extension_ws_pool.discard(ws) + self._extension_ws_order.pop(ws, None) + continue + live.append(ws) + if self._extension_ws in live: + live.remove(self._extension_ws) + live.insert(0, self._extension_ws) + return live + + def clear_extension(self, ws=None): + """Called when an extension WS disconnects. + + We can have transient duplicate WS connections (service worker reload/reconnect). + Only drop global connectivity when no live WS remains. 
+ """ + if ws is not None: + self._extension_ws_pool.discard(ws) + self._extension_ws_order.pop(ws, None) + if self._extension_ws is ws: + self._extension_ws = self._choose_live_ws() + elif self._extension_ws is None: + self._extension_ws = self._choose_live_ws() + else: + self._extension_ws_pool.clear() + self._extension_ws_order.clear() + self._extension_ws = None + self._ws_disconnect_count += 1 self._ws_last_disconnect_at = time.time() - # Cancel all pending futures (copy to avoid RuntimeError on concurrent modification) + + if self._extension_ws is not None: + logger.info( + "Extension WS disconnected, but another WS is still active (connects=%d disconnects=%d)", + self._ws_connect_count, + self._ws_disconnect_count, + ) + return + + # No live WS left: fail pending requests. pending_copy = list(self._pending.items()) count = len(pending_copy) - for req_id, future in pending_copy: + for _req_id, future in pending_copy: if not future.done(): future.set_exception(ConnectionError("Extension disconnected")) self._pending.clear() - logger.warning("Extension disconnected, cleared %d pending requests", count) + logger.warning("All extension WS disconnected, cleared %d pending requests", count) def set_flow_key(self, key: str): + if key and key != self._flow_key: + self._credits_cache = None + self._credits_cached_at = 0.0 + self._credits_cache_flow_key = "" self._flow_key = key @property @@ -76,14 +587,22 @@ def ws_stats(self) -> dict: async def handle_message(self, data: dict): """Handle incoming message from extension.""" if data.get("type") == "token_captured": - self._flow_key = data.get("flowKey") - logger.info("Flow key captured from extension") - asyncio.create_task(self._sync_tier()) + new_key = str(data.get("flowKey") or "").strip() + key_changed = bool(new_key) and new_key != (self._flow_key or "") + if new_key: + if key_changed: + self._credits_cache = None + self._credits_cached_at = 0.0 + self._credits_cache_flow_key = "" + self._flow_key = new_key + 
logger.info("Flow key captured from extension%s", " (updated)" if key_changed else "") + self._queue_tier_sync(reason="token_captured", force=key_changed) return if data.get("type") == "extension_ready": logger.info("Extension ready, flowKey=%s", "yes" if data.get("flowKeyPresent") else "no") - asyncio.create_task(self._sync_tier()) + # Avoid redundant credits checks on each worker reconnect. + self._queue_tier_sync(reason="extension_ready") return if data.get("type") == "media_urls_refresh": @@ -106,16 +625,50 @@ async def handle_message(self, data: dict): self._pending[req_id].set_result(data) return - async def _sync_tier(self): + def _queue_tier_sync(self, *, reason: str, force: bool = False): + """Schedule a debounced tier sync to avoid spamming /v1/credits.""" + if not self.connected: + return + if not self._flow_key: + # Without a captured token, credits call is likely to fail/noise. + return + + now = time.time() + key_changed = self._flow_key != self._last_tier_sync_flow_key + should_run = force or key_changed or (now - self._last_tier_sync_at) >= self._tier_sync_min_interval_sec + if not should_run: + return + + if self._sync_task and not self._sync_task.done(): + return + + self._sync_task = asyncio.create_task(self._sync_tier(reason=reason)) + + async def _sync_tier(self, *, reason: str = "unknown"): """Detect current tier from credits API and update all active projects.""" - if getattr(self, '_sync_in_progress', False): + if self._sync_in_progress: return self._sync_in_progress = True try: - result = await self.get_credits() + result = await self.get_credits(max_age_sec=self._tier_sync_min_interval_sec) + status = result.get("status") + if _is_ws_error(result) or (isinstance(status, int) and status >= 400): + self._last_tier_sync_at = time.time() + logger.warning( + "Tier sync skipped (%s): credits failed status=%s error=%s", + reason, + status, + _extract_error_text(result) or "unknown", + ) + return + data = result.get("data", result) - tier = 
data.get("userPaygateTier", "PAYGATE_TIER_ONE") - logger.info("Syncing tier: %s", tier) + tier = data.get("userPaygateTier") + if not tier: + self._last_tier_sync_at = time.time() + logger.warning("Tier sync skipped (%s): userPaygateTier missing in credits payload", reason) + return + logger.info("Syncing tier: %s (reason=%s)", tier, reason) from agent.db import crud projects = await crud.list_projects(status="ACTIVE") @@ -124,15 +677,68 @@ async def _sync_tier(self): await crud.update_project(p["id"], user_paygate_tier=tier) logger.info("Updated project %s tier: %s -> %s", p["id"][:12], p.get("user_paygate_tier"), tier) + self._last_tier_sync_at = time.time() + self._last_tier_sync_flow_key = self._flow_key or "" except Exception as e: + self._last_tier_sync_at = time.time() logger.warning("Failed to sync tier: %s", e) finally: self._sync_in_progress = False _UUID_RE = __import__("re").compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$') - _SAFE_URL_RE = __import__("re").compile(r'^https://(storage\.googleapis\.com|lh3\.googleusercontent\.com)/') + _SAFE_URL_RE = __import__("re").compile(r'^https://(storage\.googleapis\.com|lh3\.googleusercontent\.com|flow-content\.google)/') - async def _refresh_media_urls(self, urls: list[dict]): + @staticmethod + def _extract_media_url_from_result(result: dict, media_id: str | None = None) -> str | None: + """Extract URL from get_media response (supports nested payloads). + + Google can return URL in multiple fields or nested structures. We look for common keys first, + then recursively scan for HTTP URLs that likely point to this media. 
+ """ + if not isinstance(result, dict): + return None + payload = result.get("data", result) + + def _prefer(url: str) -> bool: + if not _is_direct_media_url(url): + return False + low = str(url).lower() + if media_id and str(media_id).lower() in low: + return True + return True + + if isinstance(payload, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = payload.get(key) + if _prefer(value): + return value + + seen_ids: set[int] = set() + queue: deque = deque([payload]) + while queue: + node = queue.popleft() + node_id = id(node) + if node_id in seen_ids: + continue + seen_ids.add(node_id) + + if isinstance(node, dict): + for key, value in node.items(): + if key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri") and _prefer(value): + return value + if isinstance(value, (dict, list, tuple)): + queue.append(value) + elif _prefer(value): + return value + elif isinstance(node, (list, tuple)): + for value in node: + if isinstance(value, (dict, list, tuple)): + queue.append(value) + elif _prefer(value): + return value + return None + + async def _refresh_media_urls(self, urls: list[dict]) -> int: """Update scene/character URLs in DB from fresh TRPC-captured signed URLs. 
Each entry: {mediaId: str, mediaType: 'image'|'video', url: str} @@ -145,6 +751,7 @@ async def _refresh_media_urls(self, urls: list[dict]): media_id = entry.get("mediaId", "") media_type = entry.get("mediaType", "") url = entry.get("url", "") + project_id = str(entry.get("projectId") or "").strip().lower() if not media_id or not url: continue # Validate media_id is UUID and url is from trusted domains @@ -156,6 +763,12 @@ async def _refresh_media_urls(self, urls: list[dict]): continue if media_type not in ("image", "video"): continue + cached_local_url = await self.cache_media_locally( + media_id, + url, + project_id=project_id or None, + ) + effective_url = cached_local_url or url # Try matching against scenes (check both orientations) scenes = await crud.list_scenes_by_media_id(media_id) @@ -164,18 +777,24 @@ async def _refresh_media_urls(self, urls: list[dict]): if media_type == "image": # Update whichever orientation matches if scene.get("vertical_image_media_id") == media_id: - updates["vertical_image_url"] = url + if not _has_local_media_file(scene.get("vertical_image_url")): + updates["vertical_image_url"] = effective_url if scene.get("horizontal_image_media_id") == media_id: - updates["horizontal_image_url"] = url + if not _has_local_media_file(scene.get("horizontal_image_url")): + updates["horizontal_image_url"] = effective_url elif media_type == "video": if scene.get("vertical_video_media_id") == media_id: - updates["vertical_video_url"] = url + if not _has_local_media_file(scene.get("vertical_video_url")): + updates["vertical_video_url"] = effective_url if scene.get("horizontal_video_media_id") == media_id: - updates["horizontal_video_url"] = url + if not _has_local_media_file(scene.get("horizontal_video_url")): + updates["horizontal_video_url"] = effective_url if scene.get("vertical_upscale_media_id") == media_id: - updates["vertical_upscale_url"] = url + if not _has_local_media_file(scene.get("vertical_upscale_url")): + updates["vertical_upscale_url"] 
= effective_url if scene.get("horizontal_upscale_media_id") == media_id: - updates["horizontal_upscale_url"] = url + if not _has_local_media_file(scene.get("horizontal_upscale_url")): + updates["horizontal_upscale_url"] = effective_url if updates: await crud.update_scene(scene["id"], **updates) updated += 1 @@ -184,26 +803,893 @@ async def _refresh_media_urls(self, urls: list[dict]): chars = await crud.list_characters_by_media_id(media_id) for char in chars: if media_type == "image" and char.get("media_id") == media_id: - await crud.update_character(char["id"], reference_image_url=url) - updated += 1 + if not _has_local_media_file(char.get("reference_image_url")): + await crud.update_character(char["id"], reference_image_url=effective_url) + updated += 1 if updated: logger.info("Refreshed %d media URLs from TRPC intercept", updated) await event_bus.emit("urls_refreshed", {"count": updated}) + return updated + + async def cache_media_locally( + self, + media_id: str, + remote_url: str | None, + *, + project_id: str | None = None, + ) -> str | None: + """Persist a stable local copy for a scene/character media and return proxy URL.""" + from agent.db import crud - async def refresh_project_urls(self, project_id: str) -> dict: - """Refresh media URLs for a project. 
+ if not isinstance(media_id, str) or not self._UUID_RE.match(media_id): + return None + if not _is_direct_media_url(remote_url): + return None + + requested_project_id = str(project_id or "").strip().lower() + first_local_url: str | None = None + + scenes = await crud.list_scenes_by_media_id(media_id) + video_cache: dict[str, dict | None] = {} + project_cache: dict[str, dict | None] = {} + + async def _scene_project(scene: dict) -> tuple[dict | None, dict | None]: + video_id = scene.get("video_id") + if not isinstance(video_id, str) or not video_id: + return None, None + if video_id not in video_cache: + video_cache[video_id] = await crud.get_video(video_id) + video = video_cache.get(video_id) + pid = (video or {}).get("project_id") + if not isinstance(pid, str) or not pid: + return video, None + if pid not in project_cache: + project_cache[pid] = await crud.get_project(pid) + return video, project_cache.get(pid) + + scene_slots = ( + ("vertical_image_media_id", "vertical_image_url", "image", "vertical"), + ("horizontal_image_media_id", "horizontal_image_url", "image", "horizontal"), + ("vertical_video_media_id", "vertical_video_url", "video", "vertical"), + ("horizontal_video_media_id", "horizontal_video_url", "video", "horizontal"), + ("vertical_upscale_media_id", "vertical_upscale_url", "upscale", "vertical"), + ("horizontal_upscale_media_id", "horizontal_upscale_url", "upscale", "horizontal"), + ) + + for scene in scenes: + video, project = await _scene_project(scene) + scene_project_id = str((video or {}).get("project_id") or "").strip().lower() + if requested_project_id and scene_project_id and scene_project_id != requested_project_id: + continue - Note: Google Flow's get_media API returns encoded content (base64), - not fresh signed URLs. URL refresh requires TRPC intercept from - the extension when the user opens the project in Chrome. - The video reviewer falls back to get_media content directly. 
+ for media_field, url_field, kind, axis in scene_slots: + if scene.get(media_field) != media_id: + continue + + current_url = scene.get(url_field) + if _has_local_media_file(current_url): + if isinstance(current_url, str): + return current_url + continue + + project_seed = ( + (project or {}).get("name") + or (video or {}).get("project_id") + or scene.get("video_id") + or "project" + ) + project_slug = slugify(str(project_seed)) or "project" + display_order = int(scene.get("display_order") or 0) + 1 + nominal_ext = "png" if kind == "image" else "mp4" + canonical_name = scene_filename(display_order, scene["id"], ext=nominal_ext) + subdir = "images" if kind == "image" else ("upscale" if kind == "upscale" else "videos") + target_base_path = OUTPUT_DIR / project_slug / subdir / axis / canonical_name + downloaded = await _download_remote_media( + remote_url, + target_base_path, + "image" if kind == "image" else "video", + ) + if not downloaded: + continue + + proxy_url = _build_local_media_proxy_url(downloaded) + if scene.get(url_field) != proxy_url: + await crud.update_scene(scene["id"], **{url_field: proxy_url}) + if first_local_url is None: + first_local_url = proxy_url + + chars = await crud.list_characters_by_media_id(media_id) + for char in chars: + char_project_id = str(char.get("project_id") or "").strip().lower() + if requested_project_id and char_project_id and char_project_id != requested_project_id: + continue + + current_url = char.get("reference_image_url") + if _has_local_media_file(current_url): + if isinstance(current_url, str): + return current_url + continue + + slug = slugify(str(char.get("name") or "character")) or "character" + target_base_path = OUTPUT_DIR / "_shared" / "refs" / f"{slug}_{char['id']}.png" + downloaded = await _download_remote_media(remote_url, target_base_path, "image") + if not downloaded: + continue + + proxy_url = _build_local_media_proxy_url(downloaded) + if char.get("reference_image_url") != proxy_url: + await 
crud.update_character(char["id"], reference_image_url=proxy_url) + if first_local_url is None: + first_local_url = proxy_url + + return first_local_url + + async def find_local_media_url( + self, + media_id: str, + *, + project_id: str | None = None, + ) -> str | None: + from agent.db import crud + + if not isinstance(media_id, str) or not self._UUID_RE.match(media_id): + return None + + requested_project_id = str(project_id or "").strip().lower() + scenes = await crud.list_scenes_by_media_id(media_id) + video_cache: dict[str, dict | None] = {} + project_cache: dict[str, dict | None] = {} + for scene in scenes: + video_id = scene.get("video_id") + video = None + project = None + scene_project_id = "" + if isinstance(video_id, str) and video_id: + if video_id not in video_cache: + video_cache[video_id] = await crud.get_video(video_id) + video = video_cache.get(video_id) + scene_project_id = str((video or {}).get("project_id") or "").strip().lower() + if requested_project_id and scene_project_id and scene_project_id != requested_project_id: + continue + if scene_project_id: + if scene_project_id not in project_cache: + project_cache[scene_project_id] = await crud.get_project(scene_project_id) + project = project_cache.get(scene_project_id) + + project_seed = ( + (project or {}).get("name") + or (video or {}).get("project_id") + or scene.get("video_id") + or "project" + ) + project_slug = slugify(str(project_seed)) or "project" + scene_display_order = int(scene.get("display_order") or 0) + scene_id = str(scene.get("id") or "") + if not scene_id: + continue + + for url_field in ( + "vertical_image_url", + "horizontal_image_url", + "vertical_video_url", + "horizontal_video_url", + "vertical_upscale_url", + "horizontal_upscale_url", + ): + meta = _scene_slot_meta(url_field) + if not meta: + continue + kind, axis, nominal_ext = meta + url = scene.get(url_field) + local_path = _extract_local_media_path(url) + if local_path and local_path.exists() and local_path.is_file(): 
+ canonical_base = _canonical_scene_media_base_path( + project_slug=project_slug, + scene_id=scene_id, + display_order=scene_display_order, + kind=kind, + axis=axis, + ) + canonical_target = canonical_base.with_suffix(local_path.suffix or f".{nominal_ext}") + chosen_path = local_path + if canonical_target != local_path: + try: + canonical_target.parent.mkdir(parents=True, exist_ok=True) + if not canonical_target.exists(): + shutil.copy2(local_path, canonical_target) + chosen_path = canonical_target + except Exception: + chosen_path = local_path + normalized_url = _build_local_media_proxy_url(chosen_path) + if url != normalized_url: + await crud.update_scene(scene_id, **{url_field: normalized_url}) + return normalized_url + + canonical_existing = _find_scene_canonical_file( + project_slug=project_slug, + scene_id=scene_id, + display_order=scene_display_order, + kind=kind, + axis=axis, + ) + if not canonical_existing: + canonical_existing = _find_scene_media_file_any_project( + scene_id=scene_id, + kind=kind, + axis=axis, + ) + if canonical_existing: + normalized_url = _build_local_media_proxy_url(canonical_existing) + if url != normalized_url: + await crud.update_scene(scene_id, **{url_field: normalized_url}) + return normalized_url + + chars = await crud.list_characters_by_media_id(media_id) + for char in chars: + char_project_id = str(char.get("project_id") or "").strip().lower() + if requested_project_id and char_project_id and char_project_id != requested_project_id: + continue + url = char.get("reference_image_url") + local_path = _extract_local_media_path(url) + if local_path and local_path.exists() and local_path.is_file(): + normalized_url = _build_local_media_proxy_url(local_path) + if url != normalized_url: + await crud.update_character(char["id"], reference_image_url=normalized_url) + return normalized_url + + slug = slugify(str(char.get("name") or "character")) or "character" + canonical_base = OUTPUT_DIR / "_shared" / "refs" / f"{slug}_{char['id']}.png" 
+ canonical_existing = canonical_base if canonical_base.exists() else None + if not canonical_existing: + for candidate in sorted(canonical_base.parent.glob(f"{canonical_base.stem}.*")): + if candidate.is_file(): + canonical_existing = candidate + break + if canonical_existing and canonical_existing.exists(): + normalized_url = _build_local_media_proxy_url(canonical_existing) + if url != normalized_url: + await crud.update_character(char["id"], reference_image_url=normalized_url) + return normalized_url + fallback_ref = _find_character_ref_any_slug(char["id"]) + if fallback_ref and fallback_ref.exists(): + normalized_url = _build_local_media_proxy_url(fallback_ref) + if url != normalized_url: + await crud.update_character(char["id"], reference_image_url=normalized_url) + return normalized_url + return None + + _SIGNED_URL_RE = re.compile( + r"https://(?:storage\.googleapis\.com/ai-sandbox-videofx/(?:image|video)/[0-9a-f-]{36}" + r"|flow-content\.google/(?:image|video)/[0-9a-f-]{36})[^\s\"']*", + re.IGNORECASE, + ) + _MEDIA_PATH_RE = re.compile(r"/(image|video)/([0-9a-f-]{36})(?:\?|$)", re.IGNORECASE) + + def _extract_signed_media_entries(self, payload: object) -> list[dict]: + """Extract signed media URLs from TRPC payloads. 
+ + Returns a deduplicated list with shape: + { "mediaId": "", "mediaType": "image|video", "url": "" } + """ + candidates: list[str] = [] + structured_candidates: list[dict] = [] + queue: deque = deque([payload]) + seen_nodes: set[int] = set() + + def _normalized(s: str) -> str: + return _decode_escaped_text(s) + + def _extract_id_and_type(value: str) -> tuple[str | None, str | None]: + if not isinstance(value, str): + return None, None + raw = value.strip() + if not raw: + return None, None + low = raw.lower() + if self._UUID_RE.match(low): + return low, None + m = self._MEDIA_PATH_RE.search(low) + if m: + return m.group(2), m.group(1) + m2 = re.search(r"\b([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})\b", low) + if m2: + return m2.group(1), None + return None, None + + while queue: + node = queue.popleft() + nid = id(node) + if nid in seen_nodes: + continue + seen_nodes.add(nid) + + if isinstance(node, str): + text = _normalized(node) + candidates.extend(self._SIGNED_URL_RE.findall(text)) + continue + if isinstance(node, dict): + node_media_id: str | None = None + node_media_type: str | None = None + node_url: str | None = None + + for key in ("mediaId", "media_id", "id", "name"): + value = node.get(key) + if not isinstance(value, str): + continue + extracted_id, extracted_type = _extract_id_and_type(value) + if extracted_id: + node_media_id = extracted_id + if extracted_type: + node_media_type = extracted_type + break + + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + value = node.get(key) + if not isinstance(value, str): + continue + normalized_value = _normalized(value) + if normalized_value.startswith("http"): + node_url = normalized_value + break + + if node_media_id and node_url: + inferred_type = node_media_type + if not inferred_type: + path_match = self._MEDIA_PATH_RE.search(node_url.lower()) + if path_match: + inferred_type = path_match.group(1).lower() + if inferred_type in ("image", "video"): + 
structured_candidates.append( + { + "mediaId": node_media_id, + "mediaType": inferred_type, + "url": node_url, + } + ) + + for value in node.values(): + if isinstance(value, (dict, list, tuple)): + queue.append(value) + elif isinstance(value, str): + text = _normalized(value) + candidates.extend(self._SIGNED_URL_RE.findall(text)) + continue + if isinstance(node, (list, tuple)): + for value in node: + if isinstance(value, (dict, list, tuple)): + queue.append(value) + elif isinstance(value, str): + text = _normalized(value) + candidates.extend(self._SIGNED_URL_RE.findall(text)) + + dedup: dict[str, dict] = {} + for item in structured_candidates: + media_id = item.get("mediaId") + media_type = item.get("mediaType") + url = item.get("url") + if not isinstance(media_id, str) or not isinstance(media_type, str) or not isinstance(url, str): + continue + if not self._UUID_RE.match(media_id): + continue + if media_type not in ("image", "video"): + continue + dedup[media_id] = {"mediaId": media_id, "mediaType": media_type, "url": url} + + for raw in candidates: + url = _normalized(raw) + m = self._MEDIA_PATH_RE.search(url) + if not m: + continue + media_type, media_id = m.group(1).lower(), m.group(2).lower() + if not self._UUID_RE.match(media_id): + continue + dedup[media_id] = {"mediaId": media_id, "mediaType": media_type, "url": url} + return list(dedup.values()) + + async def _refresh_project_urls_via_trpc(self, project_id: str) -> tuple[int, str | None]: + """Try to refresh project URLs via TRPC flow/project endpoints. + + Returns: (updated_count, source_endpoint_or_none) """ - logger.info("URL refresh requested for project %s — TRPC endpoint no longer available, " - "use extension passive intercept (open project in Chrome)", project_id[:12]) - return {"refreshed": 0, "found": 0, "note": "TRPC endpoint unavailable. " - "Video reviewer uses get_media fallback automatically. 
" - "For URL refresh, open the project in Google Flow in Chrome."} + def _input_param(payload: dict) -> str: + return quote(json.dumps(payload, separators=(",", ":"))) + + base_paths = [ + "https://labs.google/fx/api/trpc", + "https://flow.google.com/api/trpc", + "https://flow.google.com/fx/api/trpc", + ] + endpoints: list[tuple[str, dict]] = [ + ("flow.getFlow", {"json": {"projectId": project_id}}), + ("flow.getFlow", {"projectId": project_id}), + ("project.getProject", {"json": {"projectId": project_id}}), + ] + endpoint_candidates: list[tuple[str, str, dict | None]] = [] + for base in base_paths: + for procedure, payload in endpoints: + endpoint_candidates.append( + ( + f"{base}/{procedure}?input={_input_param(payload)}", + "GET", + None, + ) + ) + endpoint_candidates.append( + ( + f"{base}/{procedure}?batch=1&input={_input_param({'0': payload})}", + "GET", + None, + ) + ) + if len(endpoint_candidates) >= 8: + break + if len(endpoint_candidates) >= 8: + break + headers = { + "content-type": "application/json", + "accept": "*/*", + } + + for url, method, body in endpoint_candidates: + result = await self._send("trpc_request", { + "url": url, + "method": method, + "headers": headers, + "body": body, + }, timeout=7) + + status = result.get("status") + if _is_ws_error(result) or (isinstance(status, int) and status >= 400): + logger.info( + "TRPC URL refresh candidate failed: %s (%s)", + url, + _extract_error_text(result) or f"HTTP_{status}", + ) + continue + + payload = result.get("data", result) + entries = self._extract_signed_media_entries(payload) + if not entries: + logger.info("TRPC URL refresh candidate returned no signed URLs: %s", url) + continue + + updated = await self._refresh_media_urls(entries) + logger.info("TRPC URL refresh succeeded via %s (entries=%d, updated=%d)", url, len(entries), updated) + return updated, url + + return 0, None + + async def _refresh_project_urls_via_flow_tab( + self, + project_id: str, + media_hints: list[dict] | None = None, 
+ ) -> tuple[int, str | None, list[dict], str | None]: + """Ask extension to scrape signed URLs directly from the active Flow tab.""" + payload: dict = {"projectId": project_id} + payload["forceFresh"] = True + if media_hints: + payload["mediaHints"] = media_hints[:240] + result = await self._send( + "pull_project_urls", + payload, + timeout=8, + ) + status = result.get("status") + if _is_ws_error(result) or (isinstance(status, int) and status >= 400): + err_text = _extract_error_text(result) or f"HTTP_{status}" + logger.info( + "Flow-tab URL pull failed: %s", + err_text, + ) + return 0, None, [], err_text + + raw_data = result.get("data") + if not isinstance(raw_data, dict): + raw_data = result.get("result") + data = raw_data if isinstance(raw_data, dict) else result + entries_raw = data.get("entries") if isinstance(data, dict) else None + attempts = data.get("attempts") if isinstance(data, dict) else [] + attempts = attempts if isinstance(attempts, list) else [] + if not isinstance(entries_raw, list) or not entries_raw: + logger.info("Flow-tab URL pull returned no entries for project %s", project_id) + return 0, None, attempts, None + + entries: list[dict] = [] + for row in entries_raw: + if not isinstance(row, dict): + continue + media_id = str(row.get("mediaId") or "").lower().strip() + media_type = str(row.get("mediaType") or "").lower().strip() + url = str(row.get("url") or "").strip() + if not media_id or not media_type or not url: + continue + if not self._UUID_RE.match(media_id): + continue + if media_type not in ("image", "video"): + continue + entries.append({"mediaId": media_id, "mediaType": media_type, "url": url}) + + if not entries: + return 0, None, attempts, None + + updated = await self._refresh_media_urls(entries) + logger.info( + "Flow-tab URL pull refreshed %d rows (entries=%d)", + updated, + len(entries), + ) + return updated, "flow_tab", attempts, None + + async def _get_media_with_retry( + self, + media_id: str, + project_id: str | None, + 
*, + timeout_sec: float = 10.0, + ) -> tuple[dict, str]: + """Fetch get_media quickly with a project→global fallback. + + For refresh flows we prefer fast failover instead of long sequential retries: + - try project-scoped once + - try global once + """ + attempts: list[tuple[str, str | None]] = [] + if project_id: + attempts.append(("project", project_id)) + attempts.append(("global", None)) + + last_result: dict = {"error": "Unknown error"} + last_mode = "global" + for mode, pid in attempts: + last_mode = mode + result = await self.get_media(media_id, project_id=pid, timeout_sec=timeout_sec) + last_result = result + status = result.get("status", 0) + if not _is_ws_error(result) and (not isinstance(status, int) or status < 400): + # Some project-scoped responses can be HTTP 200 but omit usable URL. + # In that case continue to the next mode (global) before declaring success. + direct_url = self._extract_media_url_from_result(result, media_id) + if direct_url: + return result, mode + if mode == "project": + logger.info( + "get_media(%s) project mode returned 200 but no direct URL; fallback to global", + media_id[:12], + ) + continue + return result, mode + + # 401 means auth/token issue — no point trying further modes. + if isinstance(status, int) and status == 401: + break + + # Internal errors for images are often deterministic; move on quickly. 
+ if mode == "project": + continue + break + + return last_result, last_mode + + async def refresh_project_urls(self, project_id: str) -> dict: + """Refresh scene/character URLs for a project via get_media(media_id).""" + from agent.db import crud + from agent.services.event_bus import event_bus + + videos = await crud.list_videos(project_id) + scenes: list[dict] = [] + for video in videos: + scenes.extend(await crud.list_scenes(video["id"])) + characters = await crud.get_project_characters(project_id) + + slot_pairs = [ + ("vertical_image_media_id", "vertical_image_url"), + ("horizontal_image_media_id", "horizontal_image_url"), + ("vertical_video_media_id", "vertical_video_url"), + ("horizontal_video_media_id", "horizontal_video_url"), + ("vertical_upscale_media_id", "vertical_upscale_url"), + ("horizontal_upscale_media_id", "horizontal_upscale_url"), + ] + + media_ids: list[str] = [] + media_hint_by_mid: dict[str, str] = {} + for scene in scenes: + for media_field, _ in slot_pairs: + mid = scene.get(media_field) + if isinstance(mid, str) and mid: + media_ids.append(mid) + if "image_media_id" in media_field: + media_hint_by_mid.setdefault(mid, "image") + elif ( + "video_media_id" in media_field + or "upscale_media_id" in media_field + ): + media_hint_by_mid.setdefault(mid, "video") + for char in characters: + mid = char.get("media_id") + if isinstance(mid, str) and mid: + media_ids.append(mid) + media_hint_by_mid.setdefault(mid, "image") + + unique_media_ids: list[str] = [] + seen_media: set[str] = set() + for mid in media_ids: + if mid in seen_media: + continue + seen_media.add(mid) + unique_media_ids.append(mid) + + media_hints = [ + { + "mediaId": mid, + "mediaType": media_hint_by_mid.get(mid, ""), + } + for mid in unique_media_ids + ] + + # First ask active Flow tab/cache for signed URLs (most reliable in Electron runtime). 
+ try: + tab_updated, tab_source, tab_attempts, tab_error = await asyncio.wait_for( + self._refresh_project_urls_via_flow_tab( + project_id, + media_hints=media_hints, + ), + timeout=10, + ) + except asyncio.TimeoutError: + tab_updated, tab_source, tab_attempts, tab_error = (0, None, [], "pull_project_urls timeout") + logger.warning("refresh_project_urls: pull_project_urls timeout for project %s", project_id[:12]) + # Fallback to direct TRPC probes only when tab pull didn't refresh anything. + trpc_updated, trpc_source = (0, None) + if not tab_updated and not tab_error: + try: + trpc_updated, trpc_source = await asyncio.wait_for( + self._refresh_project_urls_via_trpc(project_id), + timeout=5, + ) + except asyncio.TimeoutError: + trpc_updated, trpc_source = (0, None) + logger.warning("refresh_project_urls: trpc fallback timeout for project %s", project_id[:12]) + + # Reload after TRPC pass so we can skip already refreshed media ids. + videos = await crud.list_videos(project_id) + scenes = [] + for video in videos: + scenes.extend(await crud.list_scenes(video["id"])) + characters = await crud.get_project_characters(project_id) + + existing_url_by_mid: dict[str, str] = {} + for scene in scenes: + for media_field, url_field in slot_pairs: + mid = scene.get(media_field) + url = scene.get(url_field) + if ( + isinstance(mid, str) + and mid + and isinstance(url, str) + and (_is_direct_media_url(url) or _has_local_media_file(url)) + ): + if _is_direct_media_url(url) and _is_signed_url_expired(url): + continue + existing_url_by_mid[mid] = url + for char in characters: + mid = char.get("media_id") + url = char.get("reference_image_url") + if ( + isinstance(mid, str) + and mid + and isinstance(url, str) + and (_is_direct_media_url(url) or _has_local_media_file(url)) + ): + if _is_direct_media_url(url) and _is_signed_url_expired(url): + continue + existing_url_by_mid[mid] = url + + url_cache: dict[str, str | None] = {} + failed = 0 + auth_failed = False + errors: list[str] = [] 
+ unresolved_media_ids: list[str] = [] + for mid in unique_media_ids: + existing = existing_url_by_mid.get(mid) + if existing: + url_cache[mid] = existing + else: + unresolved_media_ids.append(mid) + + MAX_FOLLOWUP_MEDIA_READS = 12 + + async def fetch_one_media( + mid: str, + ) -> tuple[str, str | None, str | None, bool, bool, bool]: + result, mode = await self._get_media_with_retry(mid, project_id, timeout_sec=4.5) + status = result.get("status", 500) + is_server_failure = isinstance(status, int) and status >= 500 + if _is_ws_error(result) or (isinstance(status, int) and status >= 400): + details = _extract_error_text(result) or str(result.get("data", ""))[:200] + is_internal_failure = _is_internal_error_text(details) + if is_internal_failure: + details = "Google Flow internal error (media có thể đã bị xóa hoặc chưa đồng bộ)" + return ( + mid, + None, + f"{details or f'HTTP_{status}'} [{mode}]", + bool(isinstance(status, int) and status == 401), + is_server_failure, + is_internal_failure, + ) + + url = self._extract_media_url_from_result(result, mid) + if not url: + return mid, None, f"no URL in get_media response [{mode}]", False, False, False + if not _is_direct_media_url(url): + return mid, None, f"non-direct URL in get_media response [{mode}]", False, False, False + return mid, url, None, False, False, False + + quick_stop_due_internal = False + skipped_followup_reads = 0 + + def consume_fetch_results( + rows: list[tuple[str, str | None, str | None, bool, bool, bool]], + ) -> dict[str, int]: + nonlocal failed, auth_failed + internal_failures = 0 + server_failures = 0 + row_failures = 0 + for mid, fresh_url, err_text, got_401, is_server_failure, is_internal_failure in rows: + if fresh_url: + url_cache[mid] = fresh_url + continue + url_cache[mid] = None + failed += 1 + row_failures += 1 + if got_401: + auth_failed = True + if is_server_failure: + server_failures += 1 + if is_internal_failure or (err_text and "internal error" in err_text.lower()): + 
internal_failures += 1 + if len(errors) < 5: + errors.append(f"{mid[:8]}: {err_text or 'unknown error'}") + return { + "internal": internal_failures, + "server": server_failures, + "failed": row_failures, + "total": len(rows), + } + + if unresolved_media_ids: + probe_ids = unresolved_media_ids[:4] + probe_rows = await asyncio.gather(*(fetch_one_media(mid) for mid in probe_ids)) + probe_stats = consume_fetch_results(probe_rows) + + remaining_ids = [mid for mid in unresolved_media_ids if mid not in set(probe_ids)] + probe_failed = probe_stats.get("failed", 0) + probe_internal = probe_stats.get("internal", 0) + probe_server = probe_stats.get("server", 0) + severe_probe_failures = probe_internal + probe_server + should_quick_stop = ( + bool(remaining_ids) + and ( + auth_failed + or ( + len(probe_ids) >= 3 + and probe_failed >= max(3, len(probe_ids) - 1) + and severe_probe_failures >= max(2, probe_failed - 1) + ) + ) + ) + + if should_quick_stop: + quick_stop_due_internal = True + for mid in remaining_ids: + url_cache[mid] = None + failed += len(remaining_ids) + elif remaining_ids: + followup_ids = remaining_ids[:MAX_FOLLOWUP_MEDIA_READS] + skipped_ids = remaining_ids[MAX_FOLLOWUP_MEDIA_READS:] + if skipped_ids: + skipped_followup_reads = len(skipped_ids) + for mid in skipped_ids: + url_cache[mid] = None + failed += len(skipped_ids) + sem = asyncio.Semaphore(4) + + async def fetch_with_limit(mid: str): + async with sem: + return await fetch_one_media(mid) + + remaining_rows = await asyncio.gather( + *(fetch_with_limit(mid) for mid in followup_ids), + ) + consume_fetch_results(remaining_rows) + + # Convert freshly fetched signed URLs to stable local proxy URLs when possible. 
+ for mid in unique_media_ids: + fresh_url = url_cache.get(mid) + if not isinstance(fresh_url, str) or not _is_direct_media_url(fresh_url): + continue + local_url = await self.cache_media_locally(mid, fresh_url, project_id=project_id) + if local_url: + url_cache[mid] = local_url + + refreshed = 0 + for scene in scenes: + updates: dict[str, str] = {} + for media_field, url_field in slot_pairs: + mid = scene.get(media_field) + if not isinstance(mid, str) or not mid: + continue + fresh_url = url_cache.get(mid) + if not fresh_url: + continue + if scene.get(url_field) != fresh_url: + updates[url_field] = fresh_url + if updates: + await crud.update_scene(scene["id"], **updates) + refreshed += 1 + + chars_refreshed = 0 + for char in characters: + mid = char.get("media_id") + if not isinstance(mid, str) or not mid: + continue + fresh_url = url_cache.get(mid) + if not fresh_url: + continue + if char.get("reference_image_url") != fresh_url: + await crud.update_character(char["id"], reference_image_url=fresh_url) + chars_refreshed += 1 + + if refreshed or chars_refreshed: + await event_bus.emit( + "urls_refreshed", + {"project_id": project_id, "scenes": refreshed, "characters": chars_refreshed}, + ) + + result_payload = { + "refreshed": refreshed, + "characters_refreshed": chars_refreshed, + "found": len(unique_media_ids), + "failed": failed, + } + if quick_stop_due_internal: + result_payload["fast_failover"] = True + if skipped_followup_reads: + result_payload["skipped_media_reads"] = skipped_followup_reads + if trpc_updated: + result_payload["trpc_refreshed"] = trpc_updated + if trpc_source: + result_payload["trpc_source"] = trpc_source + if tab_updated: + result_payload["tab_refreshed"] = tab_updated + if tab_source: + result_payload["tab_source"] = tab_source + elif tab_attempts: + # keep small payload for UI debugging + result_payload["tab_attempts"] = tab_attempts[:10] + if tab_error: + result_payload["tab_error"] = tab_error + if auth_failed: + result_payload["note"] = 
"AUTH_EXPIRED: open Flow tab to refresh token then retry." + elif quick_stop_due_internal and failed and not refreshed and not chars_refreshed: + result_payload["note"] = ( + "Google Flow đang trả internal error cho media cũ. " + "Đã dừng quét sâu để tránh treo; hãy mở đúng project trong cửa sổ Flow " + "để extension bắt signed URL mới, rồi thử Làm mới URL lại." + ) + elif skipped_followup_reads and failed: + result_payload["note"] = ( + "Đã giới hạn số lượt đọc media để tránh bão API_500. " + "Mở đúng project trong cửa sổ Flow rồi bấm Làm mới URL thêm lần nữa để lấy nốt." + ) + elif failed and (refreshed or chars_refreshed): + result_payload["note"] = "Một số media đã được làm mới, nhưng vẫn còn media lỗi từ Google Flow." + elif failed and not refreshed and not chars_refreshed: + result_payload["note"] = "Google Flow chưa trả URL mới cho media cũ. Hãy mở lại project trong cửa sổ Flow rồi thử lại." + if errors: + result_payload["errors"] = errors + return result_payload async def _send(self, method: str, params: dict, timeout: float = 300) -> dict: """Send request to extension and wait for response. @@ -213,26 +1699,85 @@ async def _send(self, method: str, params: dict, timeout: float = 300) -> dict: Never raises; exceptions are caught and returned as error dicts. 
""" if not self._extension_ws: - return {"error": "Extension not connected"} + self._extension_ws = self._choose_live_ws() + if self._extension_ws and getattr(self._extension_ws, "closed", False): + self._extension_ws = self._choose_live_ws() - req_id = str(uuid.uuid4()) - future = asyncio.get_running_loop().create_future() - self._pending[req_id] = future + ws_candidates = self._live_ws_candidates() + if not ws_candidates: + return {"error": "Extension not connected"} - try: - await self._extension_ws.send(json.dumps({ - "id": req_id, - "method": method, - "params": params, - })) - result = await asyncio.wait_for(future, timeout=timeout) - return result - except asyncio.TimeoutError: - return {"error": f"Timeout ({timeout}s) waiting for {method}"} - except Exception as e: - return {"error": str(e)} - finally: - self._pending.pop(req_id, None) + def _prefer_another_ws(result: dict, attempt_index: int) -> bool: + if attempt_index >= len(ws_candidates) - 1: + return False + text = (_extract_error_text(result) or "").lower() + status = result.get("status") + + # For runtime status probes, pick a richer responder if available. + if method == "get_status": + raw_data = result.get("data") + if not isinstance(raw_data, dict): + raw_data = result.get("result") + data = raw_data if isinstance(raw_data, dict) else {} + has_runtime_fields = any( + key in data + for key in ( + "flowTabId", + "flowTabUrl", + "mediaCacheSize", + "projectTabBindings", + ) + ) + if not has_runtime_fields: + return True + return False + + # For Flow-runtime bound methods, retry another ws on tab/runtime errors. 
+ if method in ("api_request", "pull_project_urls", "solve_captcha", "refresh_token"): + retry_markers = ( + "no_flow_tab", + "flow_tab_not_ready", + "extension not connected", + "cannot access contents of the page", + "must request permission to access the respective host", + ) + if any(marker in text for marker in retry_markers): + return True + if isinstance(status, int) and status == 503 and "extension not connected" in text: + return True + return False + + last_error: dict = {"error": "Extension not connected"} + for idx, ws in enumerate(ws_candidates): + self._extension_ws = ws + req_id = str(uuid.uuid4()) + future = asyncio.get_running_loop().create_future() + self._pending[req_id] = future + + try: + await ws.send(json.dumps({ + "id": req_id, + "method": method, + "params": params, + })) + result = await asyncio.wait_for(future, timeout=timeout) + last_error = result + if _prefer_another_ws(result, idx): + logger.info( + "WS candidate #%d returned sparse/unavailable response for %s; trying another extension ws", + idx + 1, + method, + ) + continue + return result + except asyncio.TimeoutError: + last_error = {"error": f"Timeout ({timeout}s) waiting for {method}"} + except Exception as e: + last_error = {"error": str(e)} + finally: + self._pending.pop(req_id, None) + + return last_error def _build_url(self, endpoint_key: str, **kwargs) -> str: """Build full API URL.""" @@ -276,7 +1821,8 @@ async def create_project(self, project_title: str, tool_name: str = "PINHOLE") - async def generate_images(self, prompt: str, project_id: str, aspect_ratio: str = "IMAGE_ASPECT_RATIO_PORTRAIT", user_paygate_tier: str = "PAYGATE_TIER_TWO", - character_media_ids: list[str] = None) -> dict: + character_media_ids: list[str] = None, + image_model_key: str | None = None) -> dict: """Generate image(s). 
If character_media_ids is provided, uses edit_image flow (batchGenerateImages @@ -289,12 +1835,14 @@ async def generate_images(self, prompt: str, project_id: str, ts = int(time.time() * 1000) ctx = self._client_context(project_id, user_paygate_tier) + selected_image_model = image_model_key or IMAGE_MODELS["NANO_BANANA_PRO"] + request_item = { "clientContext": {**ctx, "sessionId": f";{ts}"}, "seed": ts % 1000000, "structuredPrompt": {"parts": [{"text": prompt}]}, "imageAspectRatio": aspect_ratio, - "imageModelName": IMAGE_MODELS["NANO_BANANA_PRO"], + "imageModelName": selected_image_model, } # Add character references if provided (edit_image flow) @@ -326,7 +1874,8 @@ async def edit_image(self, prompt: str, source_media_id: str, project_id: str, aspect_ratio: str = "IMAGE_ASPECT_RATIO_PORTRAIT", user_paygate_tier: str = "PAYGATE_TIER_ONE", - character_media_ids: list[str] = None) -> dict: + character_media_ids: list[str] = None, + image_model_key: str | None = None) -> dict: """Edit an existing image using IMAGE_INPUT_TYPE_BASE_IMAGE. 
If character_media_ids is provided, appends them as IMAGE_INPUT_TYPE_REFERENCE @@ -343,12 +1892,14 @@ async def edit_image(self, prompt: str, source_media_id: str, for mid in character_media_ids: image_inputs.append({"name": mid, "imageInputType": "IMAGE_INPUT_TYPE_REFERENCE"}) + selected_image_model = image_model_key or IMAGE_MODELS["NANO_BANANA_PRO"] + request_item = { "clientContext": {**ctx, "sessionId": f";{ts}"}, "seed": ts % 1000000, "structuredPrompt": {"parts": [{"text": prompt}]}, "imageAspectRatio": aspect_ratio, - "imageModelName": IMAGE_MODELS["NANO_BANANA_PRO"], + "imageModelName": selected_image_model, "imageInputs": image_inputs, } @@ -372,7 +1923,8 @@ async def generate_video(self, start_image_media_id: str, prompt: str, project_id: str, scene_id: str, aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT", end_image_media_id: str = None, - user_paygate_tier: str = "PAYGATE_TIER_TWO") -> dict: + user_paygate_tier: str = "PAYGATE_TIER_TWO", + video_model_key: str | None = None) -> dict: """Generate video from start image (i2v). 
Two sub-types: @@ -380,44 +1932,79 @@ async def generate_video(self, start_image_media_id: str, prompt: str, - start_end_frame_2_video (i2v_fl): startImage + endImage (for scene chaining) """ gen_type = "start_end_frame_2_video" if end_image_media_id else "frame_2_video" - model_key = VIDEO_MODELS.get(user_paygate_tier, {}).get(gen_type, {}).get(aspect_ratio) - - if not model_key: + candidates = _resolve_video_model_candidates( + user_paygate_tier=user_paygate_tier, + gen_type=gen_type, + aspect_ratio=aspect_ratio, + requested_model_key=video_model_key, + ) + + if not candidates: return {"error": f"No model for tier={user_paygate_tier} type={gen_type} ratio={aspect_ratio}"} - request = { - "aspectRatio": aspect_ratio, - "seed": int(time.time()) % 10000, - "textInput": {"structuredPrompt": {"parts": [{"text": prompt}]}}, - "videoModelKey": model_key, - "startImage": {"mediaId": start_image_media_id}, - "metadata": {"sceneId": scene_id}, - } - - if end_image_media_id: - request["endImage"] = {"mediaId": end_image_media_id} - endpoint_key = "generate_video_start_end" if end_image_media_id else "generate_video" - body = { - "mediaGenerationContext": {"batchId": f"{uuid.uuid4()}"}, - "clientContext": self._client_context(project_id, user_paygate_tier), - "requests": [request], - "useV2ModelConfig": True, - } - url = self._build_url(endpoint_key) - return await self._send("api_request", { - "url": url, - "method": "POST", - "headers": random_headers(), - "body": body, - "captchaAction": "VIDEO_GENERATION", - }, timeout=60) # Submit only — polling is separate + last_result: dict | None = None + + for idx, (source, model_key, ctx_tier) in enumerate(candidates): + logger.info( + "generate_video attempt %d/%d: gen_type=%s ratio=%s model=%s source=%s context_tier=%s requested_tier=%s", + idx + 1, + len(candidates), + gen_type, + aspect_ratio, + model_key, + source, + ctx_tier, + user_paygate_tier, + ) + + request = { + "aspectRatio": aspect_ratio, + "seed": int(time.time()) % 
10000, + "textInput": {"structuredPrompt": {"parts": [{"text": prompt}]}}, + "videoModelKey": model_key, + "startImage": {"mediaId": start_image_media_id}, + "metadata": {"sceneId": scene_id}, + } + if end_image_media_id: + request["endImage"] = {"mediaId": end_image_media_id} + + body = { + "mediaGenerationContext": {"batchId": f"{uuid.uuid4()}"}, + "clientContext": self._client_context(project_id, ctx_tier), + "requests": [request], + "useV2ModelConfig": True, + } + + result = await self._send("api_request", { + "url": url, + "method": "POST", + "headers": random_headers(), + "body": body, + "captchaAction": "VIDEO_GENERATION", + }, timeout=60) + + last_result = result + if not _is_model_access_denied(result): + if idx > 0: + logger.info("generate_video recovered with fallback model=%s", model_key) + return result + + logger.warning( + "generate_video model denied for model=%s (%s): %s", + model_key, + source, + _extract_error_text(result), + ) + + return last_result or {"error": "Video generation failed with all model candidates"} async def generate_video_from_references(self, reference_media_ids: list[str], prompt: str, project_id: str, scene_id: str, aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT", - user_paygate_tier: str = "PAYGATE_TIER_TWO") -> dict: + user_paygate_tier: str = "PAYGATE_TIER_TWO", + video_model_key: str | None = None) -> dict: """Generate video from multiple reference images (r2v). 
Uses referenceImages instead of startImage — the model composes @@ -427,38 +2014,72 @@ async def generate_video_from_references(self, reference_media_ids: list[str], reference_media_ids: List of character media_ids (from uploadImage) """ gen_type = "reference_frame_2_video" - model_key = VIDEO_MODELS.get(user_paygate_tier, {}).get(gen_type, {}).get(aspect_ratio) - - if not model_key: + candidates = _resolve_video_model_candidates( + user_paygate_tier=user_paygate_tier, + gen_type=gen_type, + aspect_ratio=aspect_ratio, + requested_model_key=video_model_key, + ) + + if not candidates: return {"error": f"No model for tier={user_paygate_tier} type={gen_type} ratio={aspect_ratio}"} - request = { - "aspectRatio": aspect_ratio, - "seed": int(time.time()) % 10000, - "textInput": {"structuredPrompt": {"parts": [{"text": prompt}]}}, - "videoModelKey": model_key, - "referenceImages": [ - {"mediaId": mid, "imageUsageType": "IMAGE_USAGE_TYPE_ASSET"} - for mid in reference_media_ids - ], - "metadata": {}, - } - - body = { - "mediaGenerationContext": {"batchId": f"{uuid.uuid4()}"}, - "clientContext": self._client_context(project_id, user_paygate_tier), - "requests": [request], - "useV2ModelConfig": True, - } - url = self._build_url("generate_video_references") - return await self._send("api_request", { - "url": url, - "method": "POST", - "headers": random_headers(), - "body": body, - "captchaAction": "VIDEO_GENERATION", - }, timeout=60) + last_result: dict | None = None + + for idx, (source, model_key, ctx_tier) in enumerate(candidates): + logger.info( + "generate_video_from_references attempt %d/%d: ratio=%s model=%s source=%s context_tier=%s requested_tier=%s", + idx + 1, + len(candidates), + aspect_ratio, + model_key, + source, + ctx_tier, + user_paygate_tier, + ) + + request = { + "aspectRatio": aspect_ratio, + "seed": int(time.time()) % 10000, + "textInput": {"structuredPrompt": {"parts": [{"text": prompt}]}}, + "videoModelKey": model_key, + "referenceImages": [ + 
{"mediaId": mid, "imageUsageType": "IMAGE_USAGE_TYPE_ASSET"} + for mid in reference_media_ids + ], + "metadata": {}, + } + + body = { + "mediaGenerationContext": {"batchId": f"{uuid.uuid4()}"}, + "clientContext": self._client_context(project_id, ctx_tier), + "requests": [request], + "useV2ModelConfig": True, + } + + result = await self._send("api_request", { + "url": url, + "method": "POST", + "headers": random_headers(), + "body": body, + "captchaAction": "VIDEO_GENERATION", + }, timeout=60) + + last_result = result + if not _is_model_access_denied(result): + if idx > 0: + logger.info("generate_video_from_references recovered with fallback model=%s", model_key) + return result + + logger.warning( + "generate_video_from_references model denied for model=%s (%s): %s", + model_key, + source, + _extract_error_text(result), + ) + + return last_result or {"error": "Reference video generation failed with all model candidates"} async def upscale_video(self, media_id: str, scene_id: str, aspect_ratio: str = "VIDEO_ASPECT_RATIO_PORTRAIT", @@ -504,15 +2125,126 @@ async def check_video_status(self, operations: list[dict]) -> dict: "body": body, }, timeout=30) # No captcha needed - async def get_credits(self) -> dict: + def _credits_cache_fresh(self, *, force: bool, max_age_sec: float | None = None) -> bool: + if force or not self._credits_cache: + return False + if self._credits_cache_flow_key != (self._flow_key or ""): + return False + status = self._credits_cache.get("status") if isinstance(self._credits_cache, dict) else None + is_error = _is_ws_error(self._credits_cache) or (isinstance(status, int) and status >= 400) + ttl = float(FLOW_CREDITS_ERROR_TTL_SEC if is_error else (max_age_sec or FLOW_CREDITS_CACHE_TTL_SEC)) + return (time.time() - self._credits_cached_at) <= ttl + + async def get_credits(self, *, force: bool = False, max_age_sec: float | None = None) -> dict: """Get user credits and tier.""" + if self._credits_cache_fresh(force=force, max_age_sec=max_age_sec): 
+ logger.debug("Credits cache hit (age=%.1fs)", time.time() - self._credits_cached_at) + return dict(self._credits_cache or {}) + + if not force and self._credits_inflight and not self._credits_inflight.done(): + logger.debug("Credits request coalesced with in-flight call") + return dict(await self._credits_inflight) + + async def _fetch_credits() -> dict: + url = self._build_url("get_credits") + result = await self._send("api_request", { + "url": url, + "method": "GET", + "headers": read_headers(), + }, timeout=15) + self._credits_cache = dict(result) + self._credits_cached_at = time.time() + self._credits_cache_flow_key = self._flow_key or "" + return result + + self._credits_inflight = asyncio.create_task(_fetch_credits()) + try: + return dict(await self._credits_inflight) + finally: + if self._credits_inflight and self._credits_inflight.done(): + self._credits_inflight = None + + async def get_credits_uncached(self) -> dict: + """Force a live credits fetch. Keep explicit for diagnostics only.""" url = self._build_url("get_credits") return await self._send("api_request", { "url": url, "method": "GET", - "headers": random_headers(), + "headers": read_headers(), }, timeout=15) + async def refresh_token(self) -> dict: + """Ask extension to re-capture a fresh Flow auth token from the Flow tab.""" + return await self._send("refresh_token", {}, timeout=25) + + async def get_extension_status(self) -> dict: + """Get runtime extension status (not just WS socket presence).""" + if not self.connected: + return { + "connected": False, + "agent_connected": False, + "flow_key_present": bool(self._flow_key), + "state": "off", + "manual_disconnect": False, + "runtime_connected": False, + "token_auth_state": "unknown", + "token_auth_checked_at": None, + "token_auth_error": None, + } + + result = await self._send("get_status", {}, timeout=2.5) + if _is_ws_error(result): + return { + "connected": True, + "agent_connected": False, + "flow_key_present": bool(self._flow_key), + 
"state": "off", + "manual_disconnect": False, + "runtime_connected": False, + "error": _extract_error_text(result) or "STATUS_UNAVAILABLE", + "token_auth_state": "unknown", + "token_auth_checked_at": None, + "token_auth_error": None, + } + + raw_data = result.get("data") + if not isinstance(raw_data, dict): + raw_data = result.get("result") + data = raw_data if isinstance(raw_data, dict) else result + if not data or not isinstance(data, dict): + logger.warning("get_extension_status unexpected payload: %s", str(result)[:260]) + data = {} + elif not data.get("state") and not data.get("flowKeyPresent"): + logger.warning("get_extension_status sparse payload: %s", str(result)[:260]) + agent_connected = bool(data.get("agentConnected", data.get("connected", self.connected))) + state = str(data.get("state") or ("idle" if agent_connected else "off")).lower() + manual_disconnect = bool(data.get("manualDisconnect", False)) + flow_key_present = bool(data.get("flowKeyPresent", self._flow_key)) + runtime_connected = agent_connected and not manual_disconnect and state != "off" + return { + "connected": self.connected, + "agent_connected": agent_connected, + "flow_key_present": flow_key_present, + "state": state, + "manual_disconnect": manual_disconnect, + "runtime_connected": runtime_connected, + "flow_tab_id": data.get("flowTabId"), + "flow_tab_url": data.get("flowTabUrl"), + "flow_tab_seen_at": data.get("flowTabSeenAt"), + "token_age_ms": data.get("tokenAge"), + "token_auth_state": data.get("tokenAuthState") + or (data.get("metrics") or {}).get("tokenAuthState") + or "unknown", + "token_auth_checked_at": data.get("tokenAuthCheckedAt") + or (data.get("metrics") or {}).get("tokenAuthCheckedAt"), + "token_auth_error": data.get("tokenAuthError") + or (data.get("metrics") or {}).get("tokenAuthError"), + "metrics": data.get("metrics"), + "media_cache_size": data.get("mediaCacheSize"), + "project_tab_bindings": data.get("projectTabBindings"), + "debug_flow_tabs": 
data.get("debugFlowTabs"), + } + async def validate_media_id(self, media_id: str) -> bool: """Check if a mediaId is still valid. @@ -523,18 +2255,112 @@ async def validate_media_id(self, media_id: str) -> bool: status = result.get("status", 500) return isinstance(status, int) and status == 200 - async def get_media(self, media_id: str) -> dict: + async def get_media( + self, + media_id: str, + project_id: str | None = None, + *, + timeout_sec: float = 20, + ) -> dict: """Fetch media metadata from Google Flow. Returns the raw API response which contains a fresh signed URL in data.fifeUrl or data.servingUri. """ - url = f"{GOOGLE_FLOW_API}/v1/media/{media_id}?key={GOOGLE_API_KEY}&clientContext.tool=PINHOLE" - return await self._send("api_request", { - "url": url, - "method": "GET", - "headers": random_headers(), - }, timeout=15) + async def _pull_from_project_tab() -> dict | None: + if not project_id: + return None + now = time.time() + cool_until = self._project_pull_cooldown_until.get(project_id, 0.0) + if now < cool_until: + return None + self._project_pull_cooldown_until[project_id] = now + 1.2 + pull = await self._send( + "pull_project_urls", + { + "projectId": project_id, + "mediaHints": [{"mediaId": media_id, "mediaType": ""}], + "forceFresh": True, + }, + timeout=max(12, min(30, timeout_sec)), + ) + pull_status = pull.get("status") + if _is_ws_error(pull) or (isinstance(pull_status, int) and pull_status >= 400): + return None + + raw_data = pull.get("data") + if not isinstance(raw_data, dict): + raw_data = pull.get("result") + data = raw_data if isinstance(raw_data, dict) else {} + entries = data.get("entries") if isinstance(data.get("entries"), list) else [] + for row in entries: + if not isinstance(row, dict): + continue + if str(row.get("mediaId") or "").lower().strip() != media_id.lower(): + continue + url = str(row.get("url") or "").strip() + if not _is_direct_media_url(url): + continue + return { + "status": 200, + "data": { + "name": media_id, + 
"fifeUrl": url, + "url": url, + "_source": "pull_project_urls", + }, + } + return None + + # Fast path: prefer URLs already visible in active Flow tab before hitting /v1/media. + # This avoids intermittent API_500 storms for older media IDs. + if project_id and timeout_sec >= 8: + pulled = await _pull_from_project_tab() + if pulled: + return pulled + + base = f"{GOOGLE_FLOW_API}/v1/media/{media_id}?key={GOOGLE_API_KEY}&clientContext.tool=PINHOLE" + candidates: list[tuple[str, str]] = [] + if project_id: + candidates.append(("project", f"{base}&clientContext.projectId={project_id}")) + candidates.append(("global", base)) + + last_result: dict = {"error": "MEDIA_FETCH_FAILED"} + for mode, url in candidates: + result = await self._send("api_request", { + "url": url, + "method": "GET", + "headers": read_headers(), + }, timeout=timeout_sec) + last_result = result + status = result.get("status") + if not _is_ws_error(result) and (not isinstance(status, int) or status < 400): + direct_url = self._extract_media_url_from_result(result, media_id) + if direct_url: + return result + # HTTP 200 but no direct URL in payload: try scraping signed URLs from Flow tab. + if project_id and timeout_sec >= 8: + pulled = await _pull_from_project_tab() + if pulled: + return pulled + return result + logger.info( + "get_media failed (%s) media=%s status=%s err=%s", + mode, + media_id[:12], + status, + _extract_error_text(result)[:180], + ) + + # Fallback for image-like INTERNAL errors: ask extension for cached/project URLs. + # Keep this path only for direct UI reads (long timeout), not bulk refresh loops. 
+ err_text = _extract_error_text(last_result) + if project_id and timeout_sec >= 18 and _is_internal_error_text(err_text): + pulled = await _pull_from_project_tab() + if pulled: + return pulled + + return last_result async def upload_image(self, image_base64: str, mime_type: str = "image/jpeg", project_id: str = "", file_name: str = "image.jpg") -> dict: diff --git a/agent/services/headers.py b/agent/services/headers.py index b7e1ac3..6ee26b3 100644 --- a/agent/services/headers.py +++ b/agent/services/headers.py @@ -38,3 +38,21 @@ def random_headers() -> dict: "x-browser-year": "2025", "x-client-data": client_data, } + + +def read_headers() -> dict: + """Headers for read-only endpoints (credits/media/status polling). + + Keep deterministic browser-like headers so Google Flow read endpoints can + be called from extension context without relying on randomization. + """ + return { + "accept": "*/*", + "accept-language": "en-US,en;q=0.9", + "content-type": "text/plain;charset=UTF-8", + "origin": "https://labs.google", + "referer": "https://labs.google/", + "sec-fetch-dest": "empty", + "sec-fetch-mode": "cors", + "sec-fetch-site": "cross-site", + } diff --git a/agent/services/local_upscaler.py b/agent/services/local_upscaler.py new file mode 100644 index 0000000..906601d --- /dev/null +++ b/agent/services/local_upscaler.py @@ -0,0 +1,721 @@ +"""Local 4K video upscaler using Real-ESRGAN + ffmpeg.""" +from __future__ import annotations + +import asyncio +import logging +import os +import shutil +import sys +import tempfile +from contextlib import suppress +from dataclasses import dataclass +from pathlib import Path +from typing import Any +from urllib.parse import parse_qs, quote, unquote, urlparse + +from agent.config import API_HOST, API_PORT, BASE_DIR, OUTPUT_DIR +from agent.db import crud +from agent.services.flow_client import get_flow_client +from agent.utils.paths import scene_4k_path +from agent.utils.slugify import slugify + +logger = logging.getLogger(__name__) 
+ + +def _env_int(name: str, default: int, *, min_value: int | None = None, max_value: int | None = None) -> int: + raw = os.environ.get(name) + try: + value = int(raw) if raw is not None else default + except Exception: + value = default + if min_value is not None: + value = max(min_value, value) + if max_value is not None: + value = min(max_value, value) + return value + + +def _env_bool(name: str, default: bool) -> bool: + raw = os.environ.get(name) + if raw is None: + return default + return raw.strip().lower() not in {"0", "false", "no", "off"} + + +LOCAL_UPSCALE_SETUP_MARKER = "LOCAL_UPSCALE_SETUP_REQUIRED" +DEFAULT_LOCAL_UPSCALE_ENGINE = (os.environ.get("LOCAL_UPSCALE_ENGINE", "fast").strip().lower() or "fast") +DEFAULT_LOCAL_UPSCALE_MODEL = os.environ.get("LOCAL_UPSCALE_MODEL", "realesrgan-x4plus").strip() or "realesrgan-x4plus" +DEFAULT_LOCAL_UPSCALE_SCALE = _env_int("LOCAL_UPSCALE_SCALE", 4, min_value=2, max_value=4) +DEFAULT_LOCAL_UPSCALE_TIMEOUT_SEC = _env_int("LOCAL_UPSCALE_TIMEOUT_SEC", 900, min_value=120, max_value=3600) +DEFAULT_LOCAL_UPSCALE_EXTRACT_TIMEOUT_SEC = _env_int("LOCAL_UPSCALE_EXTRACT_TIMEOUT_SEC", 480, min_value=60, max_value=1800) +DEFAULT_LOCAL_UPSCALE_ENCODE_TIMEOUT_SEC = _env_int("LOCAL_UPSCALE_ENCODE_TIMEOUT_SEC", 600, min_value=60, max_value=2400) +DEFAULT_LOCAL_UPSCALE_PRESET = os.environ.get("LOCAL_UPSCALE_PRESET", "slow").strip() or "slow" +DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS = _env_int("LOCAL_UPSCALE_FFMPEG_THREADS", 1, min_value=1, max_value=16) +DEFAULT_LOCAL_UPSCALE_REALESRGAN_JOBS = ( + os.environ.get("LOCAL_UPSCALE_REALESRGAN_JOBS", "1:1:1").strip() or "1:1:1" +) +DEFAULT_LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE = _env_bool("LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE", True) +DEFAULT_LOCAL_UPSCALE_FAST_PRESET = os.environ.get("LOCAL_UPSCALE_FAST_PRESET", "veryfast").strip() or "veryfast" +DEFAULT_LOCAL_UPSCALE_AUTO_MAX_FRAMES = _env_int("LOCAL_UPSCALE_AUTO_MAX_FRAMES", 96, min_value=24, max_value=300) 
+DEFAULT_LOCAL_UPSCALE_AUTO_MAX_DURATION_SEC = _env_int("LOCAL_UPSCALE_AUTO_MAX_DURATION_SEC", 4, min_value=2, max_value=20) + +_API_PUBLIC_HOST = "127.0.0.1" if API_HOST in {"0.0.0.0", "::"} else API_HOST +_LOCAL_MEDIA_PROXY_BASE = f"http://{_API_PUBLIC_HOST}:{API_PORT}/api/flow/local-media" + + +def local_upscale_dispatch_timeout_sec() -> int: + """Timeout budget for one local upscale request in worker dispatch.""" + margin = _env_int("LOCAL_UPSCALE_DISPATCH_MARGIN_SEC", 90, min_value=30, max_value=600) + return ( + DEFAULT_LOCAL_UPSCALE_EXTRACT_TIMEOUT_SEC + + DEFAULT_LOCAL_UPSCALE_TIMEOUT_SEC + + DEFAULT_LOCAL_UPSCALE_ENCODE_TIMEOUT_SEC + + margin + ) + + +@dataclass(frozen=True) +class LocalUpscaleTools: + ffmpeg: str + ffprobe: str + realesrgan: str + model_dir: Path + model_name: str + scale: int + + +def _extract_local_media_path(url: str | None) -> Path | None: + if not isinstance(url, str): + return None + text = url.strip() + if not text: + return None + + if text.startswith("http://") or text.startswith("https://"): + try: + parsed = urlparse(text) + if (parsed.hostname or "").lower() not in {"127.0.0.1", "localhost"}: + return None + if parsed.path.rstrip("/") != "/api/flow/local-media": + return None + raw_path = (parse_qs(parsed.query).get("path") or [None])[0] + if not isinstance(raw_path, str) or not raw_path.strip(): + return None + candidate = Path(unquote(raw_path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + if text.startswith("file://"): + try: + parsed = urlparse(text) + candidate = Path(unquote(parsed.path)).expanduser() + return candidate if candidate.is_absolute() else None + except Exception: + return None + + candidate = Path(text).expanduser() + return candidate if candidate.is_absolute() else None + + +def _build_local_media_proxy_url(path: Path) -> str: + return f"{_LOCAL_MEDIA_PROXY_BASE}?path={quote(str(path), safe='')}" + + +def _is_direct_media_url(url: str | None) -> 
bool: + if not isinstance(url, str): + return False + text = url.strip().lower() + if not text.startswith("http"): + return False + if "media.getmediaurlredirect" in text: + return False + if text.startswith("https://flow-content.google/"): + return True + if text.startswith("https://storage.googleapis.com/"): + return True + if "googleusercontent.com/" in text: + return True + return False + + +def _extract_first_direct_url(payload: Any) -> str | None: + candidates: list[str] = [] + + def _walk(node: Any) -> None: + if isinstance(node, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + val = node.get(key) + if isinstance(val, str): + candidates.append(val) + for val in node.values(): + _walk(val) + return + if isinstance(node, list): + for item in node: + _walk(item) + + _walk(payload) + for url in candidates: + if _is_direct_media_url(url): + return url + return None + + +def _resolve_binary(env_var: str, candidates: list[str]) -> str | None: + raw = (os.environ.get(env_var) or "").strip() + if raw: + p = Path(raw).expanduser() + if p.exists(): + return str(p.resolve()) + found = shutil.which(raw) + if found: + return found + for cand in candidates: + p = Path(cand).expanduser() + if p.exists(): + return str(p.resolve()) + found = shutil.which(cand) + if found: + return found + return None + + +def _runtime_platform() -> str: + if os.name == "nt": + return "win32" + if sys.platform == "darwin": + return "darwin" + return sys.platform + + +def _runtime_root_candidates() -> list[Path]: + roots: list[Path] = [] + env_root = (os.environ.get("LOCAL_UPSCALE_RUNTIME_ROOT") or "").strip() + if env_root: + roots.append(Path(env_root).expanduser()) + + base = BASE_DIR / "third_party" + roots.append(base / _runtime_platform()) + roots.append(base) + + dedup: list[Path] = [] + seen: set[str] = set() + for root in roots: + key = str(root) + if key in seen: + continue + seen.add(key) + dedup.append(root) + return dedup + + +def _resolve_tools() -> 
LocalUpscaleTools: + runtime_roots = _runtime_root_candidates() + ffmpeg = _resolve_binary("LOCAL_UPSCALE_FFMPEG", ["ffmpeg"]) + ffprobe = _resolve_binary("LOCAL_UPSCALE_FFPROBE", ["ffprobe"]) + + realesrgan_candidates = ["realesrgan-ncnn-vulkan", "realesrgan-ncnn-vulkan.exe"] + for root in runtime_roots: + realesrgan_candidates.extend( + [ + str(root / "realesrgan" / "realesrgan-ncnn-vulkan"), + str(root / "realesrgan" / "realesrgan-ncnn-vulkan.exe"), + ] + ) + + realesrgan = _resolve_binary( + "LOCAL_UPSCALE_BIN", + realesrgan_candidates, + ) + + model_dir_env = (os.environ.get("LOCAL_UPSCALE_MODEL_DIR") or "").strip() + if model_dir_env: + model_dir = Path(model_dir_env).expanduser() + else: + default_model_dirs = [root / "realesrgan" / "models" for root in runtime_roots] + default_model_dirs.append(BASE_DIR / "third_party" / "realesrgan" / "models") + model_dir = next((path for path in default_model_dirs if path.exists()), default_model_dirs[0]) + + model_name = DEFAULT_LOCAL_UPSCALE_MODEL + scale = DEFAULT_LOCAL_UPSCALE_SCALE + + missing: list[str] = [] + if not ffmpeg: + missing.append("ffmpeg") + if not ffprobe: + missing.append("ffprobe") + if not realesrgan: + missing.append("realesrgan-ncnn-vulkan") + if not model_dir.exists(): + missing.append(f"model_dir:{model_dir}") + else: + model_file_candidates = [model_dir / f"{model_name}.param", model_dir / f"{model_name}.bin"] + if not all(p.exists() for p in model_file_candidates): + missing.append(f"model:{model_name} in {model_dir}") + + if missing: + raise RuntimeError( + f"{LOCAL_UPSCALE_SETUP_MARKER}: thiếu {'; '.join(missing)}. " + "Thiết lập LOCAL_UPSCALE_BIN + LOCAL_UPSCALE_MODEL_DIR hoặc cài Real-ESRGAN ncnn." 
+ ) + + return LocalUpscaleTools( + ffmpeg=ffmpeg, + ffprobe=ffprobe, + realesrgan=realesrgan, + model_dir=model_dir, + model_name=model_name, + scale=scale, + ) + + +async def _run_cmd(cmd: list[str], *, timeout_sec: int, cwd: Path | None = None) -> tuple[bool, str]: + proc = await asyncio.create_subprocess_exec( + *cmd, + cwd=str(cwd) if cwd else None, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + try: + stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=timeout_sec) + except asyncio.TimeoutError: + with suppress(Exception): + proc.kill() + with suppress(Exception): + await asyncio.wait_for(proc.wait(), timeout=5) + return False, f"timeout after {timeout_sec}s: {' '.join(cmd[:4])}..." + except asyncio.CancelledError: + with suppress(Exception): + proc.kill() + with suppress(Exception): + await asyncio.wait_for(proc.wait(), timeout=5) + raise RuntimeError( + f"Local upscale command cancelled (dispatch timeout): {' '.join(cmd[:4])}..." + ) + if proc.returncode != 0: + out_tail = (stdout or b"")[-240:].decode("utf-8", errors="ignore") + err_tail = (stderr or b"")[-480:].decode("utf-8", errors="ignore") + return False, (err_tail or out_tail or f"exit {proc.returncode}").strip() + return True, "" + + +async def _probe_avg_fps(ffprobe_bin: str, source: str) -> str: + cmd = [ + ffprobe_bin, + "-v", + "error", + "-select_streams", + "v:0", + "-show_entries", + "stream=avg_frame_rate", + "-of", + "default=nokey=1:noprint_wrappers=1", + source, + ] + try: + proc = await asyncio.create_subprocess_exec( + *cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=30) + if proc.returncode != 0: + err = (stderr or b"").decode("utf-8", errors="ignore").strip() + logger.warning("ffprobe fps failed: %s", err[-240:] if err else f"exit {proc.returncode}") + return "30" + fps = (stdout or b"").decode("utf-8", errors="ignore").strip() + return fps or 
"30" + except Exception: + return "30" + + +async def _probe_video_meta(ffprobe_bin: str, source: str) -> dict[str, float]: + cmd = [ + ffprobe_bin, + "-v", + "error", + "-select_streams", + "v:0", + "-show_entries", + "stream=avg_frame_rate,width,height:format=duration", + "-of", + "default=nokey=1:noprint_wrappers=1", + source, + ] + try: + proc = await asyncio.create_subprocess_exec( + *cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=20) + if proc.returncode != 0: + return {"fps": 30.0, "duration": 0.0, "width": 0.0, "height": 0.0, "frames": 0.0} + lines = (stdout or b"").decode("utf-8", errors="ignore").strip().splitlines() + if len(lines) < 4: + return {"fps": 30.0, "duration": 0.0, "width": 0.0, "height": 0.0, "frames": 0.0} + fps_text, width_text, height_text, duration_text = lines[:4] + fps = 30.0 + if "/" in fps_text: + num, den = fps_text.split("/", 1) + fps = float(num) / max(1.0, float(den)) + elif fps_text: + fps = float(fps_text) + width = float(width_text or 0.0) + height = float(height_text or 0.0) + duration = float(duration_text or 0.0) + frames = max(0.0, fps * duration) + return {"fps": fps, "duration": duration, "width": width, "height": height, "frames": frames} + except Exception: + return {"fps": 30.0, "duration": 0.0, "width": 0.0, "height": 0.0, "frames": 0.0} + + +def _choose_upscale_engine(meta: dict[str, float]) -> str: + engine = DEFAULT_LOCAL_UPSCALE_ENGINE + if engine in {"fast", "ai"}: + return engine + # auto mode: only use AI for short clips; default to fast for stability/speed. 
+ frames = float(meta.get("frames", 0.0) or 0.0) + duration = float(meta.get("duration", 0.0) or 0.0) + if frames <= DEFAULT_LOCAL_UPSCALE_AUTO_MAX_FRAMES and duration <= DEFAULT_LOCAL_UPSCALE_AUTO_MAX_DURATION_SEC: + return "ai" + return "fast" + + +async def _upscale_video_fast( + tools: LocalUpscaleTools, + *, + source: str, + target_w: int, + target_h: int, + fps: str, + output_path: Path, +) -> tuple[bool, str]: + cmd = [ + tools.ffmpeg, + "-hide_banner", + "-loglevel", + "error", + "-y", + "-threads", + str(DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS), + "-i", + source, + "-map", + "0:v:0", + "-map", + "0:a?", + "-vf", + ( + f"scale={target_w}:{target_h}:flags=lanczos:force_original_aspect_ratio=decrease," + f"pad={target_w}:{target_h}:(ow-iw)/2:(oh-ih)/2," + "unsharp=5:5:0.8:3:3:0.35" + ), + "-r", + fps, + "-c:v", + "libx264", + "-preset", + DEFAULT_LOCAL_UPSCALE_FAST_PRESET, + "-crf", + "17", + "-pix_fmt", + "yuv420p", + "-c:a", + "aac", + "-b:a", + "192k", + "-movflags", + "+faststart", + "-shortest", + str(output_path), + ] + return await _run_cmd(cmd, timeout_sec=DEFAULT_LOCAL_UPSCALE_ENCODE_TIMEOUT_SEC) + + +async def _resolve_source_video( + scene: dict, + orientation: str, + project_id: str | None, + *, + allow_remote_fallback: bool, +) -> str | None: + prefix = "vertical" if orientation == "VERTICAL" else "horizontal" + video_url = scene.get(f"{prefix}_video_url") + media_id = scene.get(f"{prefix}_video_media_id") + + local_path = _extract_local_media_path(video_url) + if local_path and local_path.exists() and local_path.is_file(): + return str(local_path) + + client = get_flow_client() + normalized_pid = str(project_id or "").strip().lower() or None + + if isinstance(media_id, str) and media_id: + local_url = await client.find_local_media_url(media_id, project_id=normalized_pid) + local_path = _extract_local_media_path(local_url) + if local_path and local_path.exists() and local_path.is_file(): + return str(local_path) + + if not allow_remote_fallback: + 
return None + + if isinstance(media_id, str) and media_id and _is_direct_media_url(video_url): + local_url = await client.cache_media_locally(media_id, video_url, project_id=normalized_pid) + local_path = _extract_local_media_path(local_url) + if local_path and local_path.exists() and local_path.is_file(): + return str(local_path) + + if isinstance(media_id, str) and media_id and client.connected: + media_resp = await client.get_media(media_id, project_id=normalized_pid, timeout_sec=20) + if not media_resp.get("error"): + payload = media_resp.get("data", media_resp) + fresh_url = _extract_first_direct_url(payload) + if _is_direct_media_url(fresh_url): + local_url = await client.cache_media_locally(media_id, fresh_url, project_id=normalized_pid) + local_path = _extract_local_media_path(local_url) + if local_path and local_path.exists() and local_path.is_file(): + return str(local_path) + return fresh_url + + if _is_direct_media_url(video_url): + return video_url + return None + + +async def upscale_scene_video_local( + scene: dict, + orientation: str, + *, + project_id: str | None = None, +) -> dict: + """Upscale a scene video to 4K locally and return a Flow-like operation payload.""" + try: + tools = _resolve_tools() + except Exception as exc: + return {"error": str(exc)} + + scene_id = str(scene.get("id") or "") + if not scene_id: + return {"error": "Missing scene id for local upscale"} + + source = await _resolve_source_video( + scene, + orientation, + project_id, + allow_remote_fallback=not DEFAULT_LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE, + ) + if not source: + if DEFAULT_LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE: + return { + "error": ( + "No local source video available for local upscale. " + "Hay tai video local truoc (download video) hoac dat " + "LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE=0 de cho phep fallback online." 
+ ) + } + return {"error": "No source video available for local upscale"} + + video = await crud.get_video(scene.get("video_id")) if scene.get("video_id") else None + project = await crud.get_project((video or {}).get("project_id")) if video and video.get("project_id") else None + project_seed = ( + (project or {}).get("name") + or (video or {}).get("project_id") + or scene.get("video_id") + or "project" + ) + project_slug = slugify(str(project_seed)) or "project" + display_order = int(scene.get("display_order") or 0) + 1 + output_path = scene_4k_path(project_slug, display_order, scene_id) + output_path.parent.mkdir(parents=True, exist_ok=True) + + target_w, target_h = (2160, 3840) if orientation == "VERTICAL" else (3840, 2160) + fps = await _probe_avg_fps(tools.ffprobe, source) + video_meta = await _probe_video_meta(tools.ffprobe, source) + engine = _choose_upscale_engine(video_meta) + logger.info( + "Local upscale start scene=%s orientation=%s source=%s engine=%s ffmpeg_threads=%d jobs=%s frames=%.0f duration=%.2fs", + scene_id[:8], + orientation, + source, + engine, + DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS, + DEFAULT_LOCAL_UPSCALE_REALESRGAN_JOBS, + float(video_meta.get("frames", 0.0) or 0.0), + float(video_meta.get("duration", 0.0) or 0.0), + ) + + if engine == "fast": + ok, msg = await _upscale_video_fast( + tools, + source=source, + target_w=target_w, + target_h=target_h, + fps=fps, + output_path=output_path, + ) + if not ok: + return {"error": f"Local upscale fast failed: {msg}"} + if not output_path.exists(): + return {"error": "Local upscale fast failed: output file missing"} + local_url = _build_local_media_proxy_url(output_path) + return { + "data": { + "operations": [ + { + "operation": { + "name": f"local-upscale-{scene_id[:8]}", + "metadata": { + "video": { + "fifeUrl": local_url, + } + }, + }, + "status": "MEDIA_GENERATION_STATUS_SUCCESSFUL", + } + ] + } + } + + tmp_root = OUTPUT_DIR / "_tmp" / "local_upscale" + tmp_root.mkdir(parents=True, 
exist_ok=True) + + with tempfile.TemporaryDirectory(prefix=f"{scene_id[:10]}_", dir=str(tmp_root)) as td: + workdir = Path(td) + frames_in = workdir / "frames_in" + frames_up = workdir / "frames_up" + frames_in.mkdir(parents=True, exist_ok=True) + frames_up.mkdir(parents=True, exist_ok=True) + + extract_cmd = [ + tools.ffmpeg, + "-hide_banner", + "-loglevel", + "error", + "-y", + "-threads", + str(DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS), + "-i", + source, + "-vsync", + "0", + str(frames_in / "frame_%08d.png"), + ] + ok, msg = await _run_cmd(extract_cmd, timeout_sec=DEFAULT_LOCAL_UPSCALE_EXTRACT_TIMEOUT_SEC) + if not ok: + return {"error": f"Local upscale extract failed: {msg}"} + + upscale_cmd = [ + tools.realesrgan, + "-i", + str(frames_in), + "-o", + str(frames_up), + "-n", + tools.model_name, + "-s", + str(tools.scale), + "-f", + "png", + "-m", + str(tools.model_dir), + ] + if DEFAULT_LOCAL_UPSCALE_REALESRGAN_JOBS: + upscale_cmd.extend(["-j", DEFAULT_LOCAL_UPSCALE_REALESRGAN_JOBS]) + ok, msg = await _run_cmd(upscale_cmd, timeout_sec=DEFAULT_LOCAL_UPSCALE_TIMEOUT_SEC) + if not ok: + return {"error": f"Local upscale Real-ESRGAN failed: {msg}"} + + encode_cmd = [ + tools.ffmpeg, + "-hide_banner", + "-loglevel", + "error", + "-y", + "-threads", + str(DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS), + "-framerate", + fps, + "-i", + str(frames_up / "frame_%08d.png"), + "-i", + source, + "-map", + "0:v:0", + "-map", + "1:a?", + "-vf", + ( + f"scale={target_w}:{target_h}:force_original_aspect_ratio=decrease," + f"pad={target_w}:{target_h}:(ow-iw)/2:(oh-ih)/2," + "unsharp=5:5:0.8:3:3:0.35" + ), + "-c:v", + "libx264", + "-preset", + DEFAULT_LOCAL_UPSCALE_PRESET, + "-crf", + "16", + "-pix_fmt", + "yuv420p", + "-c:a", + "aac", + "-b:a", + "192k", + "-movflags", + "+faststart", + "-shortest", + str(output_path), + ] + ok, msg = await _run_cmd(encode_cmd, timeout_sec=DEFAULT_LOCAL_UPSCALE_ENCODE_TIMEOUT_SEC) + if not ok: + return {"error": f"Local upscale encode failed: {msg}"} + + if 
not output_path.exists(): + return {"error": "Local upscale failed: output file missing"} + + local_url = _build_local_media_proxy_url(output_path) + return { + "data": { + "operations": [ + { + "operation": { + "name": f"local-upscale-{scene_id[:8]}", + "metadata": { + "video": { + "fifeUrl": local_url, + } + }, + }, + "status": "MEDIA_GENERATION_STATUS_SUCCESSFUL", + } + ] + } + } + + +def local_upscale_health() -> dict: + """Return availability of local 4K upscaler dependencies.""" + try: + tools = _resolve_tools() + return { + "ready": True, + "ffmpeg": tools.ffmpeg, + "ffprobe": tools.ffprobe, + "realesrgan": tools.realesrgan, + "model_dir": str(tools.model_dir), + "model_name": tools.model_name, + "scale": tools.scale, + "engine": DEFAULT_LOCAL_UPSCALE_ENGINE, + "fast_preset": DEFAULT_LOCAL_UPSCALE_FAST_PRESET, + "require_local_source": DEFAULT_LOCAL_UPSCALE_REQUIRE_LOCAL_SOURCE, + "ffmpeg_threads": DEFAULT_LOCAL_UPSCALE_FFMPEG_THREADS, + "realesrgan_jobs": DEFAULT_LOCAL_UPSCALE_REALESRGAN_JOBS, + "dispatch_timeout_sec": local_upscale_dispatch_timeout_sec(), + } + except Exception as exc: + return { + "ready": False, + "error": str(exc), + } diff --git a/agent/services/post_process.py b/agent/services/post_process.py index a8287c0..382d7ce 100644 --- a/agent/services/post_process.py +++ b/agent/services/post_process.py @@ -67,8 +67,13 @@ def merge_videos(video_paths: list[str], output_path: str) -> bool: def add_narration(video_path: str, narration_path: str, output_path: str, narration_volume: float = 1.0, sfx_volume: float = 0.4, - fade_in: float = 0.5, fade_out: float = 0.5) -> bool: - """Overlay narration audio on video, ducking the existing SFX track.""" + fade_in: float = 0.5, fade_out: float = 0.5, + replace_original: bool = False) -> bool: + """Add narration audio on video. + + replace_original=False: mix narration with original SFX track. + replace_original=True: replace original audio track with narration. 
+ """ if not Path(video_path).exists(): logger.error("add_narration: video file not found: %s", video_path) return False @@ -93,16 +98,28 @@ def add_narration(video_path: str, narration_path: str, output_path: str, return False fade_start = max(0, duration - fade_out) - cmd = [ - "ffmpeg", "-y", "-i", video_path, "-i", narration_path, - "-c:v", "copy", "-c:a", "aac", "-b:a", "192k", - "-filter_complex", - f"[0:a]volume={sfx_volume}[sfx];[1:a]volume={narration_volume},afade=t=in:st=0:d={fade_in},afade=t=out:st={fade_start}:d={fade_out}[narr];[sfx][narr]amerge=inputs=2,pan=stereo|c0=c0+c2|c1=c1+c3[aout]", - "-map", "0:v", "-map", "[aout]", - "-shortest", - "-movflags", "+faststart", - output_path, - ] + if replace_original: + cmd = [ + "ffmpeg", "-y", "-i", video_path, "-i", narration_path, + "-c:v", "copy", "-c:a", "aac", "-b:a", "192k", + "-filter_complex", + f"[1:a]volume={narration_volume},afade=t=in:st=0:d={fade_in},afade=t=out:st={fade_start}:d={fade_out},apad[aout]", + "-map", "0:v", "-map", "[aout]", + "-shortest", + "-movflags", "+faststart", + output_path, + ] + else: + cmd = [ + "ffmpeg", "-y", "-i", video_path, "-i", narration_path, + "-c:v", "copy", "-c:a", "aac", "-b:a", "192k", + "-filter_complex", + f"[0:a]volume={sfx_volume}[sfx];[1:a]volume={narration_volume},afade=t=in:st=0:d={fade_in},afade=t=out:st={fade_start}:d={fade_out}[narr];[sfx][narr]amerge=inputs=2,pan=stereo|c0=c0+c2|c1=c1+c3[aout]", + "-map", "0:v", "-map", "[aout]", + "-shortest", + "-movflags", "+faststart", + output_path, + ] result = subprocess.run(cmd, capture_output=True, text=True, timeout=120) if result.returncode != 0: logger.error("Add narration failed: %s", result.stderr[-200:]) diff --git a/agent/services/tts.py b/agent/services/tts.py index 14a7298..2351ae1 100644 --- a/agent/services/tts.py +++ b/agent/services/tts.py @@ -1,4 +1,9 @@ -"""OmniVoice TTS service — subprocess-based for compatibility.""" +"""TTS service layer. 
+ +Supports two providers: +- elevenlabs (default) +- omnivoice (legacy local model) +""" import asyncio import json import logging @@ -7,7 +12,10 @@ from pathlib import Path from typing import Optional +import httpx + from agent.config import TTS_MODEL, TTS_SAMPLE_RATE +from agent.services.tts_settings import get_tts_settings logger = logging.getLogger(__name__) @@ -73,7 +81,114 @@ """ -async def generate_speech( +def _tts_provider() -> str: + settings = get_tts_settings() + provider = str(settings.get("provider") or "elevenlabs").strip().lower() + return provider if provider in {"elevenlabs", "omnivoice"} else "elevenlabs" + + +def _run_tts_subprocess(args: dict) -> dict: + """Run OmniVoice TTS subprocess.""" + proc = subprocess.run( + [PYTHON_BIN, "-c", _TTS_SCRIPT, json.dumps(args)], + capture_output=True, text=True, timeout=120, + ) + if proc.returncode != 0: + return {"ok": False, "error": proc.stderr[-500:] if proc.stderr else "unknown error"} + try: + return json.loads(proc.stdout.strip().split("\n")[-1]) + except (json.JSONDecodeError, IndexError): + return {"ok": False, "error": proc.stdout[-200:] + proc.stderr[-200:]} + + +def _run_batch_subprocess(args: dict) -> list[dict]: + """Run OmniVoice batch subprocess. 
Model loads once.""" + timeout = 180 + len(args.get("items", [])) * 45 # ~180s model load + ~45s per scene + proc = subprocess.run( + [PYTHON_BIN, "-c", _TTS_BATCH_SCRIPT, json.dumps(args)], + capture_output=True, text=True, timeout=timeout, + ) + if proc.returncode != 0: + error = proc.stderr[-500:] if proc.stderr else "unknown" + return [{"id": item["id"], "ok": False, "error": error} for item in args["items"]] + try: + return json.loads(proc.stdout.strip().split("\n")[-1]) + except (json.JSONDecodeError, IndexError): + error = proc.stdout[-200:] + proc.stderr[-200:] + return [{"id": item["id"], "ok": False, "error": error} for item in args["items"]] + + +def _ffprobe_duration(path: str) -> float | None: + try: + proc = subprocess.run( + [ + "ffprobe", + "-v", + "quiet", + "-show_entries", + "format=duration", + "-of", + "csv=p=0", + path, + ], + capture_output=True, + text=True, + timeout=30, + ) + if proc.returncode != 0: + return None + return float(proc.stdout.strip()) + except Exception: + return None + + +def _atempo_filter(speed: float) -> str | None: + if abs(speed - 1.0) < 1e-3: + return None + + # ffmpeg atempo supports 0.5..2.0 per stage, so chain when needed. 
+ stages: list[float] = [] + remaining = float(speed) + while remaining > 2.0: + stages.append(2.0) + remaining /= 2.0 + while remaining < 0.5: + stages.append(0.5) + remaining /= 0.5 + stages.append(remaining) + + return ",".join(f"atempo={x:.4f}" for x in stages) + + +def _convert_audio_to_wav(src_path: Path, dst_path: Path, speed: float) -> bool: + cmd = [ + "ffmpeg", + "-y", + "-i", + str(src_path), + "-ac", + "1", + "-ar", + str(TTS_SAMPLE_RATE), + ] + speed_filter = _atempo_filter(speed) + if speed_filter: + cmd += ["-filter:a", speed_filter] + cmd += ["-c:a", "pcm_s16le", str(dst_path)] + + try: + proc = subprocess.run(cmd, capture_output=True, text=True, timeout=120) + if proc.returncode != 0: + logger.error("ffmpeg convert failed: %s", (proc.stderr or "")[-400:]) + return False + except Exception: + logger.exception("ffmpeg convert raised exception") + return False + + return dst_path.exists() and dst_path.stat().st_size > 1024 + + +async def _generate_speech_omnivoice( text: str, output_path: str, instruct: Optional[str] = None, @@ -81,9 +196,6 @@ async def generate_speech( ref_text: Optional[str] = None, speed: float = 1.0, ) -> str: - """Generate speech for text via subprocess. 
Returns path to WAV file.""" - Path(output_path).parent.mkdir(parents=True, exist_ok=True) - args = { "model": TTS_MODEL, "text": text, @@ -100,26 +212,130 @@ async def generate_speech( loop = asyncio.get_event_loop() result = await loop.run_in_executor(None, _run_tts_subprocess, args) - if not result.get("ok"): - raise RuntimeError(f"TTS failed: {result.get('error', 'unknown')}") + raise RuntimeError(f"OmniVoice TTS failed: {result.get('error', 'unknown')}") - logger.info("TTS saved to %s", output_path) return output_path -def _run_tts_subprocess(args: dict) -> dict: - """Run TTS subprocess.""" - proc = subprocess.run( - [PYTHON_BIN, "-c", _TTS_SCRIPT, json.dumps(args)], - capture_output=True, text=True, timeout=120, - ) - if proc.returncode != 0: - return {"ok": False, "error": proc.stderr[-500:] if proc.stderr else "unknown error"} +async def _generate_speech_elevenlabs( + text: str, + output_path: str, + speed: float = 1.0, + voice_id: Optional[str] = None, + model_id: Optional[str] = None, +) -> str: + settings = get_tts_settings() + api_key = str(settings.get("elevenlabs_api_key") or "").strip() + if not api_key: + raise RuntimeError("ElevenLabs API key is not configured") + + resolved_voice = str(voice_id or settings.get("elevenlabs_default_voice_id") or "").strip() + if not resolved_voice: + raise RuntimeError("ElevenLabs voice_id is required (set default voice in TTS settings)") + + resolved_model = str(model_id or settings.get("elevenlabs_model_id") or "eleven_multilingual_v2").strip() + base_url = str(settings.get("elevenlabs_api_base") or "https://api.elevenlabs.io").rstrip("/") + timeout_sec = float(settings.get("elevenlabs_timeout_sec") or 60) + max_retries = int(settings.get("elevenlabs_max_retries") or 2) + + url = f"{base_url}/v1/text-to-speech/{resolved_voice}" + headers = { + "xi-api-key": api_key, + "Content-Type": "application/json", + "Accept": "audio/mpeg", + } + payload: dict = { + "text": text, + "model_id": resolved_model, + } + + 
response_bytes: bytes | None = None + last_error: str = "" + + async with httpx.AsyncClient(timeout=timeout_sec) as client: + for attempt in range(max_retries + 1): + try: + res = await client.post(url, headers=headers, json=payload) + except Exception as e: + last_error = str(e) + if attempt < max_retries: + await asyncio.sleep(min(4.0, 1.0 + attempt)) + continue + raise RuntimeError(f"ElevenLabs request failed: {last_error}") + + if res.status_code == 429 and attempt < max_retries: + await asyncio.sleep(min(5.0, 1.5 + attempt * 1.5)) + continue + + if res.status_code >= 400: + err_text = (res.text or "").strip() + if res.status_code in (401, 403): + raise RuntimeError(f"ElevenLabs auth failed ({res.status_code}). Check API key") + if res.status_code == 429: + raise RuntimeError("ElevenLabs rate limit reached (429)") + raise RuntimeError(f"ElevenLabs API error {res.status_code}: {err_text[:300]}") + + response_bytes = res.content + break + + if not response_bytes: + raise RuntimeError(f"ElevenLabs returned no audio bytes. Last error: {last_error or 'unknown'}") + + out_path = Path(output_path) + out_path.parent.mkdir(parents=True, exist_ok=True) + + tmp_mp3 = out_path.with_suffix(f".tmp_{out_path.stem}.mp3") try: - return json.loads(proc.stdout.strip().split("\n")[-1]) - except (json.JSONDecodeError, IndexError): - return {"ok": False, "error": proc.stdout[-200:] + proc.stderr[-200:]} + tmp_mp3.write_bytes(response_bytes) + ok = _convert_audio_to_wav(tmp_mp3, out_path, speed) + if not ok: + raise RuntimeError("Failed to convert ElevenLabs audio to WAV") + finally: + try: + if tmp_mp3.exists(): + tmp_mp3.unlink() + except Exception: + pass + + return output_path + + +async def generate_speech( + text: str, + output_path: str, + instruct: Optional[str] = None, + ref_audio: Optional[str] = None, + ref_text: Optional[str] = None, + speed: float = 1.0, + voice_id: Optional[str] = None, + model_id: Optional[str] = None, +) -> str: + """Generate speech for text. 
Returns path to WAV file.""" + Path(output_path).parent.mkdir(parents=True, exist_ok=True) + + provider = _tts_provider() + if provider == "omnivoice": + result = await _generate_speech_omnivoice( + text=text, + output_path=output_path, + instruct=instruct, + ref_audio=ref_audio, + ref_text=ref_text, + speed=speed, + ) + logger.info("TTS saved to %s (provider=omnivoice)", output_path) + return result + + result = await _generate_speech_elevenlabs( + text=text, + output_path=output_path, + speed=speed, + voice_id=voice_id, + model_id=model_id, + ) + logger.info("TTS saved to %s (provider=elevenlabs)", output_path) + return result async def generate_video_narration( @@ -129,16 +345,17 @@ async def generate_video_narration( ref_audio: Optional[str] = None, ref_text: Optional[str] = None, speed: float = 1.0, + voice_id: Optional[str] = None, + model_id: Optional[str] = None, ) -> list[dict]: """Generate narration WAVs for scenes with narrator_text. - Uses batch subprocess — loads model once for all scenes. - Returns list of result dicts. + OmniVoice mode uses a batch subprocess for performance. + ElevenLabs mode generates per scene (network API). 
""" out_dir = Path(output_dir) out_dir.mkdir(parents=True, exist_ok=True) - # Build batch items (only scenes with narrator_text) items = [] scene_map = {} for scene in scenes: @@ -150,36 +367,66 @@ async def generate_video_narration( continue wav_path = str(out_dir / f"scene_{display_order:03d}_{scene_id}.wav") - # Skip if WAV already exists and is non-trivial (>1KB) if Path(wav_path).exists() and Path(wav_path).stat().st_size > 1024: logger.info("Skipping scene %03d (WAV exists: %s)", display_order, wav_path) - scene_map[scene_id] = {"display_order": display_order, "narrator_text": narrator_text, "skipped": True, "wav_path": wav_path} + scene_map[scene_id] = { + "display_order": display_order, + "narrator_text": narrator_text, + "skipped": True, + "wav_path": wav_path, + } continue items.append({"id": scene_id, "text": narrator_text, "output": wav_path}) scene_map[scene_id] = {"display_order": display_order, "narrator_text": narrator_text} - # Run batch subprocess if there are items - batch_results = {} + batch_results: dict[str, dict] = {} + provider = _tts_provider() + if items: - args = { - "model": TTS_MODEL, - "sample_rate": TTS_SAMPLE_RATE, - "speed": speed, - "items": items, - } - if instruct: - args["instruct"] = instruct - if ref_audio: - args["ref_audio"] = ref_audio - if ref_text: - args["ref_text"] = ref_text - - loop = asyncio.get_event_loop() - raw = await loop.run_in_executor(None, _run_batch_subprocess, args) - for r in raw: - batch_results[r["id"]] = r - - # Build final results for all scenes + if provider == "omnivoice": + args = { + "model": TTS_MODEL, + "sample_rate": TTS_SAMPLE_RATE, + "speed": speed, + "items": items, + } + if instruct: + args["instruct"] = instruct + if ref_audio: + args["ref_audio"] = ref_audio + if ref_text: + args["ref_text"] = ref_text + + loop = asyncio.get_event_loop() + raw = await loop.run_in_executor(None, _run_batch_subprocess, args) + for r in raw: + batch_results[r["id"]] = r + else: + for item in items: + try: + 
await generate_speech( + text=item["text"], + output_path=item["output"], + instruct=instruct, + ref_audio=ref_audio, + ref_text=ref_text, + speed=speed, + voice_id=voice_id, + model_id=model_id, + ) + batch_results[item["id"]] = { + "id": item["id"], + "ok": True, + "path": item["output"], + "duration": _ffprobe_duration(item["output"]), + } + except Exception as e: + batch_results[item["id"]] = { + "id": item["id"], + "ok": False, + "error": str(e), + } + results = [] for scene in scenes: scene_id = scene.get("id") @@ -205,7 +452,7 @@ async def generate_video_narration( "display_order": display_order, "narrator_text": narrator_text, "audio_path": sm["wav_path"], - "duration": None, + "duration": _ffprobe_duration(sm["wav_path"]), "status": "COMPLETED", "error": None, }) @@ -234,20 +481,3 @@ async def generate_video_narration( }) return results - - -def _run_batch_subprocess(args: dict) -> list[dict]: - """Run batch TTS subprocess. Model loads once.""" - timeout = 180 + len(args.get("items", [])) * 45 # ~180s model load + ~45s per scene - proc = subprocess.run( - [PYTHON_BIN, "-c", _TTS_BATCH_SCRIPT, json.dumps(args)], - capture_output=True, text=True, timeout=timeout, - ) - if proc.returncode != 0: - error = proc.stderr[-500:] if proc.stderr else "unknown" - return [{"id": item["id"], "ok": False, "error": error} for item in args["items"]] - try: - return json.loads(proc.stdout.strip().split("\n")[-1]) - except (json.JSONDecodeError, IndexError): - error = proc.stdout[-200:] + proc.stderr[-200:] - return [{"id": item["id"], "ok": False, "error": error} for item in args["items"]] diff --git a/agent/services/tts_catalog.py b/agent/services/tts_catalog.py new file mode 100644 index 0000000..543c428 --- /dev/null +++ b/agent/services/tts_catalog.py @@ -0,0 +1,232 @@ +"""Fetch runtime TTS catalog (models + voices) from ElevenLabs.""" +from __future__ import annotations + +import asyncio +import time +from typing import Any + +import httpx + +from 
agent.services.tts_settings import get_tts_settings + +_CACHE_TTL_SEC = 60.0 +_CACHE_LOCK = asyncio.Lock() +_CACHE_EXPIRES_AT = 0.0 +_CACHE_KEY = "" +_CACHE_VALUE: dict[str, Any] | None = None + +_FALLBACK_MODELS: list[dict[str, Any]] = [ + { + "model_id": "eleven_multilingual_v2", + "name": "Eleven Multilingual v2", + "description": "High quality multilingual narration.", + "language_count": 29, + }, + { + "model_id": "eleven_turbo_v2_5", + "name": "Eleven Turbo v2.5", + "description": "Fast generation with solid quality.", + "language_count": 32, + }, + { + "model_id": "eleven_flash_v2_5", + "name": "Eleven Flash v2.5", + "description": "Ultra-low latency model.", + "language_count": 32, + }, + { + "model_id": "eleven_english_v2", + "name": "Eleven English v2", + "description": "English-focused model.", + "language_count": 1, + }, +] + + +def _safe_str(value: Any) -> str: + return str(value or "").strip() + + +def _parse_models(payload: Any) -> list[dict[str, Any]]: + rows: list[Any] = [] + if isinstance(payload, list): + rows = payload + elif isinstance(payload, dict): + raw = payload.get("models") + if isinstance(raw, list): + rows = raw + + out: list[dict[str, Any]] = [] + for row in rows: + if not isinstance(row, dict): + continue + model_id = _safe_str(row.get("model_id") or row.get("id")) + if not model_id: + continue + name = _safe_str(row.get("name")) or model_id + desc = _safe_str(row.get("description")) + languages_raw = row.get("languages") + language_count = 0 + if isinstance(languages_raw, list): + language_count = len(languages_raw) + out.append( + { + "model_id": model_id, + "name": name, + "description": desc, + "language_count": language_count, + } + ) + + out.sort(key=lambda item: item["name"].lower()) + return out + + +def _parse_voices(payload: Any) -> list[dict[str, Any]]: + rows: list[Any] = [] + if isinstance(payload, dict): + raw = payload.get("voices") + if isinstance(raw, list): + rows = raw + + out: list[dict[str, Any]] = [] + for row 
in rows: + if not isinstance(row, dict): + continue + voice_id = _safe_str(row.get("voice_id") or row.get("id")) + if not voice_id: + continue + + labels_raw = row.get("labels") + labels: dict[str, str] = {} + if isinstance(labels_raw, dict): + for key, value in labels_raw.items(): + text = _safe_str(value) + if text: + labels[str(key)] = text + + out.append( + { + "voice_id": voice_id, + "name": _safe_str(row.get("name")) or voice_id, + "category": _safe_str(row.get("category")), + "preview_url": _safe_str(row.get("preview_url")) or None, + "labels": labels, + } + ) + + out.sort(key=lambda item: item["name"].lower()) + return out + + +def _source(models_api: bool, voices_api: bool) -> str: + if models_api and voices_api: + return "api" + if models_api or voices_api: + return "mixed" + return "fallback" + + +async def _fetch_json(client: httpx.AsyncClient, url: str, headers: dict[str, str]) -> tuple[Any | None, str | None]: + try: + res = await client.get(url, headers=headers) + except Exception as exc: # pragma: no cover - network edge cases + return None, str(exc) + if res.status_code >= 400: + body = (res.text or "").strip() + msg = f"HTTP {res.status_code}" + if body: + msg = f"{msg}: {body[:220]}" + return None, msg + try: + return res.json(), None + except Exception: + return None, "Invalid JSON response" + + +async def load_tts_catalog(*, force_refresh: bool = False) -> dict[str, Any]: + settings = get_tts_settings() + provider = _safe_str(settings.get("provider") or "elevenlabs").lower() + + if provider != "elevenlabs": + return { + "provider": provider if provider in {"elevenlabs", "omnivoice"} else "elevenlabs", + "source": "fallback", + "models": [], + "voices": [], + "warnings": ["Provider hiện tại không hỗ trợ catalog remote."], + } + + api_base = _safe_str(settings.get("elevenlabs_api_base")) or "https://api.elevenlabs.io" + api_key = _safe_str(settings.get("elevenlabs_api_key")) + timeout_sec = max(5.0, float(settings.get("elevenlabs_timeout_sec") or 
60.0)) + cache_key = f"{provider}|{api_base}|{api_key}" + + global _CACHE_EXPIRES_AT, _CACHE_KEY, _CACHE_VALUE + now = time.time() + if ( + not force_refresh + and _CACHE_VALUE is not None + and _CACHE_KEY == cache_key + and now < _CACHE_EXPIRES_AT + ): + return dict(_CACHE_VALUE) + + warnings: list[str] = [] + headers = {"Accept": "application/json"} + if api_key: + headers["xi-api-key"] = api_key + else: + warnings.append("Chưa có ElevenLabs API key, không thể tải danh sách voice.") + + models = list(_FALLBACK_MODELS) + voices: list[dict[str, Any]] = [] + models_from_api = False + voices_from_api = False + + async with _CACHE_LOCK: + # Double-check cache while waiting lock. + now = time.time() + if ( + not force_refresh + and _CACHE_VALUE is not None + and _CACHE_KEY == cache_key + and now < _CACHE_EXPIRES_AT + ): + return dict(_CACHE_VALUE) + + async with httpx.AsyncClient(timeout=timeout_sec) as client: + if api_key: + models_payload, models_err = await _fetch_json(client, f"{api_base.rstrip('/')}/v1/models", headers) + if models_err: + warnings.append(f"Không tải được model list: {models_err}") + else: + parsed_models = _parse_models(models_payload) + if parsed_models: + models = parsed_models + models_from_api = True + else: + warnings.append("Model list rỗng từ ElevenLabs, dùng fallback mặc định.") + + voices_payload, voices_err = await _fetch_json(client, f"{api_base.rstrip('/')}/v1/voices", headers) + if voices_err: + warnings.append(f"Không tải được voice list: {voices_err}") + else: + parsed_voices = _parse_voices(voices_payload) + if parsed_voices: + voices = parsed_voices + voices_from_api = True + else: + warnings.append("Voice list rỗng từ ElevenLabs.") + + payload = { + "provider": "elevenlabs", + "source": _source(models_from_api, voices_from_api), + "models": models, + "voices": voices, + "warnings": warnings, + } + _CACHE_KEY = cache_key + _CACHE_VALUE = dict(payload) + _CACHE_EXPIRES_AT = time.time() + _CACHE_TTL_SEC + return payload diff --git 
a/agent/services/tts_settings.py b/agent/services/tts_settings.py new file mode 100644 index 0000000..a038143 --- /dev/null +++ b/agent/services/tts_settings.py @@ -0,0 +1,172 @@ +"""Persistent settings for TTS providers (ElevenLabs / OmniVoice).""" +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +from agent.config import ( + TTS_SETTINGS_PATH, + TTS_PROVIDER, + ELEVENLABS_API_KEY, + ELEVENLABS_MODEL_ID, + ELEVENLABS_DEFAULT_VOICE_ID, + ELEVENLABS_TIMEOUT_SEC, + ELEVENLABS_MAX_RETRIES, + ELEVENLABS_API_BASE, +) + +_ALLOWED_PROVIDERS = {"elevenlabs", "omnivoice"} + + +def _defaults() -> dict[str, Any]: + provider = (TTS_PROVIDER or "elevenlabs").strip().lower() + if provider not in _ALLOWED_PROVIDERS: + provider = "elevenlabs" + return { + "provider": provider, + "elevenlabs_api_base": ELEVENLABS_API_BASE, + "elevenlabs_api_key": ELEVENLABS_API_KEY, + "elevenlabs_model_id": ELEVENLABS_MODEL_ID or "eleven_multilingual_v2", + "elevenlabs_default_voice_id": ELEVENLABS_DEFAULT_VOICE_ID, + "elevenlabs_timeout_sec": max(5.0, float(ELEVENLABS_TIMEOUT_SEC or 60)), + "elevenlabs_max_retries": max(0, int(ELEVENLABS_MAX_RETRIES or 2)), + } + + +def _read_raw_file() -> dict[str, Any]: + path = Path(TTS_SETTINGS_PATH) + if not path.exists(): + return {} + try: + raw = json.loads(path.read_text()) + if isinstance(raw, dict): + return raw + except Exception: + return {} + return {} + + +def _write_raw_file(data: dict[str, Any]) -> None: + path = Path(TTS_SETTINGS_PATH) + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(data, indent=2, ensure_ascii=False)) + + +def _normalize_provider(value: Any) -> str: + provider = str(value or "").strip().lower() + return provider if provider in _ALLOWED_PROVIDERS else "elevenlabs" + + +def _normalize_base_url(value: Any) -> str: + raw = str(value or "").strip() + if not raw: + return ELEVENLABS_API_BASE + return raw.rstrip("/") + + +def _normalize_float(value: 
Any, default: float, minimum: float) -> float: + try: + parsed = float(value) + except Exception: + parsed = default + return max(minimum, parsed) + + +def _normalize_int(value: Any, default: int, minimum: int) -> int: + try: + parsed = int(value) + except Exception: + parsed = default + return max(minimum, parsed) + + +def _normalize(settings: dict[str, Any]) -> dict[str, Any]: + defaults = _defaults() + normalized = { + "provider": _normalize_provider(settings.get("provider", defaults["provider"])), + "elevenlabs_api_base": _normalize_base_url(settings.get("elevenlabs_api_base", defaults["elevenlabs_api_base"])), + "elevenlabs_api_key": str(settings.get("elevenlabs_api_key", defaults["elevenlabs_api_key"]) or "").strip(), + "elevenlabs_model_id": str(settings.get("elevenlabs_model_id", defaults["elevenlabs_model_id"]) or "").strip() or "eleven_multilingual_v2", + "elevenlabs_default_voice_id": str(settings.get("elevenlabs_default_voice_id", defaults["elevenlabs_default_voice_id"]) or "").strip(), + "elevenlabs_timeout_sec": _normalize_float( + settings.get("elevenlabs_timeout_sec", defaults["elevenlabs_timeout_sec"]), + float(defaults["elevenlabs_timeout_sec"]), + 5.0, + ), + "elevenlabs_max_retries": _normalize_int( + settings.get("elevenlabs_max_retries", defaults["elevenlabs_max_retries"]), + int(defaults["elevenlabs_max_retries"]), + 0, + ), + } + return normalized + + +def get_tts_settings() -> dict[str, Any]: + """Return effective settings (defaults + file overrides), including secret key.""" + defaults = _defaults() + overrides = _read_raw_file() + merged = {**defaults, **overrides} + return _normalize(merged) + + +def update_tts_settings( + *, + provider: str | None = None, + elevenlabs_api_base: str | None = None, + elevenlabs_api_key: str | None = None, + clear_elevenlabs_api_key: bool = False, + elevenlabs_model_id: str | None = None, + elevenlabs_default_voice_id: str | None = None, + elevenlabs_timeout_sec: float | None = None, + 
elevenlabs_max_retries: int | None = None, +) -> dict[str, Any]: + """Update persisted settings and return normalized effective settings.""" + current = get_tts_settings() + next_settings = dict(current) + + if provider is not None: + next_settings["provider"] = provider + if elevenlabs_api_base is not None: + next_settings["elevenlabs_api_base"] = elevenlabs_api_base + if clear_elevenlabs_api_key: + next_settings["elevenlabs_api_key"] = "" + elif elevenlabs_api_key is not None: + next_settings["elevenlabs_api_key"] = elevenlabs_api_key + if elevenlabs_model_id is not None: + next_settings["elevenlabs_model_id"] = elevenlabs_model_id + if elevenlabs_default_voice_id is not None: + next_settings["elevenlabs_default_voice_id"] = elevenlabs_default_voice_id + if elevenlabs_timeout_sec is not None: + next_settings["elevenlabs_timeout_sec"] = elevenlabs_timeout_sec + if elevenlabs_max_retries is not None: + next_settings["elevenlabs_max_retries"] = elevenlabs_max_retries + + normalized = _normalize(next_settings) + _write_raw_file(normalized) + return normalized + + +def mask_secret(secret: str) -> str: + token = (secret or "").strip() + if not token: + return "" + if len(token) <= 8: + return "*" * len(token) + return f"{token[:4]}{'*' * (len(token) - 8)}{token[-4:]}" + + +def get_tts_settings_public() -> dict[str, Any]: + settings = get_tts_settings() + return { + "provider": settings["provider"], + "elevenlabs_api_base": settings["elevenlabs_api_base"], + "elevenlabs_model_id": settings["elevenlabs_model_id"], + "elevenlabs_default_voice_id": settings["elevenlabs_default_voice_id"], + "elevenlabs_timeout_sec": settings["elevenlabs_timeout_sec"], + "elevenlabs_max_retries": settings["elevenlabs_max_retries"], + "elevenlabs_api_key_set": bool(settings["elevenlabs_api_key"]), + "elevenlabs_api_key_masked": mask_secret(settings["elevenlabs_api_key"]), + } + diff --git a/agent/utils/orientation.py b/agent/utils/orientation.py new file mode 100644 index 0000000..967e124 
--- /dev/null +++ b/agent/utils/orientation.py @@ -0,0 +1,44 @@ +"""Orientation helpers shared across API/worker modules.""" + +from __future__ import annotations + + +def normalize_orientation(value: str | None, default: str = "VERTICAL") -> str: + """Normalize orientation aliases to VERTICAL/HORIZONTAL. + + Accepted aliases include: + - VERTICAL: VERTICAL, PORTRAIT, 9:16, 9/16, *_PORTRAIT + - HORIZONTAL: HORIZONTAL, LANDSCAPE, 16:9, 16/9, *_LANDSCAPE + """ + if not value: + return default + upper = str(value).strip().upper().replace(" ", "") + + vertical_aliases = { + "VERTICAL", + "PORTRAIT", + "9:16", + "9/16", + "VIDEO_ASPECT_RATIO_PORTRAIT", + "IMAGE_ASPECT_RATIO_PORTRAIT", + } + horizontal_aliases = { + "HORIZONTAL", + "LANDSCAPE", + "16:9", + "16/9", + "VIDEO_ASPECT_RATIO_LANDSCAPE", + "IMAGE_ASPECT_RATIO_LANDSCAPE", + } + + if upper in vertical_aliases or upper.endswith("_PORTRAIT"): + return "VERTICAL" + if upper in horizontal_aliases or upper.endswith("_LANDSCAPE"): + return "HORIZONTAL" + return default + + +def orientation_prefix(value: str | None, default: str = "VERTICAL") -> str: + """Return DB field prefix for the given orientation.""" + return "vertical" if normalize_orientation(value, default=default) == "VERTICAL" else "horizontal" + diff --git a/agent/worker/_parsing.py b/agent/worker/_parsing.py index d74071c..7960d8c 100644 --- a/agent/worker/_parsing.py +++ b/agent/worker/_parsing.py @@ -32,6 +32,48 @@ def _extract_uuid_from_url(url: str) -> str: return match.group(1) if match else "" +def _is_direct_media_url(url: str | None) -> bool: + if not isinstance(url, str): + return False + low = url.lower() + if not low.startswith("http"): + return False + if "media.getmediaurlredirect" in low: + return False + if low.startswith("https://flow-content.google/"): + return True + if low.startswith("https://storage.googleapis.com/"): + return True + if "googleusercontent.com/" in low: + return True + return False + + +def _collect_media_urls(node: 
object, out: list[str]) -> None: + if isinstance(node, dict): + for key in ("fifeUrl", "servingUri", "url", "imageUri", "videoUri"): + val = node.get(key) + if isinstance(val, str) and val.startswith("http"): + out.append(val) + for val in node.values(): + _collect_media_urls(val, out) + return + if isinstance(node, list): + for item in node: + _collect_media_urls(item, out) + + +def _pick_best_media_url(node: object) -> str: + candidates: list[str] = [] + _collect_media_urls(node, candidates) + if not candidates: + return "" + for url in candidates: + if _is_direct_media_url(url): + return url + return candidates[0] + + def _extract_media_id(result: dict, req_type: str) -> str: """Extract the UUID-format mediaId from API response. @@ -62,7 +104,7 @@ def _extract_media_id(result: dict, req_type: str) -> str: logger.warning("media[0].name is not UUID format: %s", name[:30]) return None - if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): ops = data.get("operations", []) if ops: video_meta = ops[0].get("operation", {}).get("metadata", {}).get("video", {}) @@ -95,17 +137,22 @@ def _extract_output_url(result: dict, req_type: str) -> str: media = data.get("media", []) if media: gen = media[0].get("image", {}).get("generatedImage", {}) - return gen.get("fifeUrl", gen.get("imageUri", gen.get("encodedImage", ""))) + picked = _pick_best_media_url(gen) + if picked: + return picked - if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): ops = data.get("operations", []) if ops: video_meta = ops[0].get("operation", {}).get("metadata", {}).get("video", {}) - url = video_meta.get("fifeUrl", "") - if url: - return url + picked = _pick_best_media_url(video_meta) 
+ if picked: + return picked # Inline rawBytes — no URL, check if saved locally if ops[0].get("rawBytes") or ops[0].get("mediaGenerationId"): return "" # URL will be set by _save_raw_bytes in operations.py + picked = _pick_best_media_url(data) + if picked: + return picked return data.get("videoUri", data.get("imageUri", "")) diff --git a/agent/worker/processor.py b/agent/worker/processor.py index 5029708..429bf5d 100644 --- a/agent/worker/processor.py +++ b/agent/worker/processor.py @@ -8,46 +8,184 @@ import json import logging import time +import re +from datetime import datetime, timedelta, timezone import aiohttp from agent.db import crud from agent.services.flow_client import get_flow_client from agent.services.event_bus import event_bus -from agent.config import POLL_INTERVAL, MAX_RETRIES, API_COOLDOWN, MAX_CONCURRENT_REQUESTS +from agent.config import ( + POLL_INTERVAL, + MAX_RETRIES, + API_COOLDOWN, + IMAGE_API_COOLDOWN, + CHARACTER_IMAGE_API_COOLDOWN, + MAX_CONCURRENT_REQUESTS, + MAX_CONCURRENT_CAPTCHA_REQUESTS, + CAPTCHA_API_COOLDOWN, + VIDEO_API_COOLDOWN, + MAX_CONCURRENT_IMAGE_REQUESTS, + MAX_CONCURRENT_VIDEO_REQUESTS, + MAX_CONCURRENT_LOCAL_UPSCALE_REQUESTS, + MAX_CONCURRENT_CHARACTER_REF_REQUESTS, + CAPTCHA_RETRY_LIMIT, + CAPTCHA_RETRY_BACKOFF_BASE, + CAPTCHA_RETRY_BACKOFF_MAX, + CAPTCHA_GROUP_PAUSE_SEC, + CAPTCHA_TRAFFIC_PAUSE_SEC, + CAPTCHA_SAFE_MODE_SEC, + CAPTCHA_SAFE_MODE_IMAGE_CONCURRENCY, + CAPTCHA_SAFE_MODE_IMAGE_COOLDOWN, + CAPTCHA_CONTENT_TIMEOUT_PAUSE_SEC, + OPERATION_FAILED_RETRY_BASE_SEC, + REQUEST_DISPATCH_TIMEOUT, + VIDEO_POLL_TIMEOUT, + STALE_PENDING_LOCAL_UPSCALE_TIMEOUT, +) from agent.worker._parsing import _is_error from agent.sdk.services.result_handler import parse_result, apply_scene_result, apply_character_result +from agent.utils.orientation import normalize_orientation logger = logging.getLogger(__name__) _API_CALL_TYPES = {"GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE", - "GENERATE_VIDEO", "REGENERATE_VIDEO", 
"GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", + "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL", "GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"} +_IMAGE_CALL_TYPES = {"GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE", + "GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"} +_CHARACTER_IMAGE_CALL_TYPES = {"GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"} +_VIDEO_CALL_TYPES = { + "GENERATE_VIDEO", + "REGENERATE_VIDEO", + "GENERATE_VIDEO_REFS", + "UPSCALE_VIDEO", + "UPSCALE_VIDEO_LOCAL", +} +_LOCAL_UPSCALE_CALL_TYPES = { + "UPSCALE_VIDEO", + "UPSCALE_VIDEO_LOCAL", +} +_CAPTCHA_CALL_TYPES = { + "GENERATE_IMAGE", + "REGENERATE_IMAGE", + "EDIT_IMAGE", + "GENERATE_VIDEO", + "REGENERATE_VIDEO", + "GENERATE_VIDEO_REFS", + "GENERATE_CHARACTER_IMAGE", + "REGENERATE_CHARACTER_IMAGE", + "EDIT_CHARACTER_IMAGE", +} _TYPE_PRIORITY = { "GENERATE_CHARACTER_IMAGE": 0, "REGENERATE_CHARACTER_IMAGE": 0, "EDIT_CHARACTER_IMAGE": 0, "GENERATE_IMAGE": 1, "REGENERATE_IMAGE": 1, "EDIT_IMAGE": 1, "GENERATE_VIDEO": 2, "REGENERATE_VIDEO": 2, "GENERATE_VIDEO_REFS": 2, - "UPSCALE_VIDEO": 3, + "UPSCALE_VIDEO": 3, "UPSCALE_VIDEO_LOCAL": 3, } +_OP_NAME_RE = re.compile(r"Operation failed:\s*([A-Za-z0-9_-]+)") +_LOCAL_UPSCALE_SETUP_MARKER = "local_upscale_setup_required" + +# Backward-compatible module-level retry map used by unit tests and as +# fallback state when _handle_failure is called without explicit retry dict. 
+_retry_state: dict[str, float] = {} + + +def _iso_after(seconds: float) -> str: + ts = datetime.now(timezone.utc) + timedelta(seconds=max(0.0, float(seconds))) + return ts.strftime("%Y-%m-%dT%H:%M:%SZ") + + +def _is_flow_tab_unavailable_error(error_lower: str) -> bool: + if not error_lower: + return False + markers = ( + "no_flow_tab", + "no flow tab", + "flow_tab_not_ready", + "flow tab not ready", + "flow tab unavailable", + "cannot access contents of the page", + "must request permission to access the respective host", + "grecaptcha not available", + "context invalidated", + "token expired", + "state off", + "no active flow tab", + "could not establish connection", + ) + return any(marker in error_lower for marker in markers) + + +def _is_unsafe_generation_error(error_lower: str) -> bool: + markers = ( + "public_error_unsafe_generation", + "unsafe_generation", + "unsafe generation", + ) + return any(marker in error_lower for marker in markers) + + +def _is_unusual_traffic_error(error_lower: str) -> bool: + markers = ( + "public_error_unusual_activity_too_much_traffic", + "too_much_traffic", + "too much traffic", + "unusual activity", + ) + return any(marker in error_lower for marker in markers) + + +def _is_captcha_timeout_error(error_lower: str) -> bool: + markers = ( + "content_timeout", + "captcha_timeout", + "timed out", + ) + return any(marker in error_lower for marker in markers) + class APIRateLimiter: """Enforces max concurrent requests AND minimum gap between API calls.""" - def __init__(self, max_concurrent: int, cooldown_seconds: float): + def __init__(self, max_concurrent: int, cooldown_seconds: float, + image_cooldown_seconds: float, character_image_cooldown_seconds: float): self._semaphore = asyncio.Semaphore(max_concurrent) self._cooldown = cooldown_seconds + self._image_cooldown = image_cooldown_seconds + self._character_image_cooldown = character_image_cooldown_seconds self._last_call = 0.0 + self._last_image_call = 0.0 + 
self._last_character_image_call = 0.0 self._gate = asyncio.Lock() + self._image_gate = asyncio.Lock() + self._character_image_gate = asyncio.Lock() - async def acquire(self): + async def acquire(self, req_type: str): await self._semaphore.acquire() + global_cooldown = self._cooldown + if req_type in _CHARACTER_IMAGE_CALL_TYPES: + global_cooldown = min(self._cooldown, self._character_image_cooldown) async with self._gate: elapsed = time.monotonic() - self._last_call - if elapsed < self._cooldown: - await asyncio.sleep(self._cooldown - elapsed) + if elapsed < global_cooldown: + await asyncio.sleep(global_cooldown - elapsed) self._last_call = time.monotonic() + if req_type in _CHARACTER_IMAGE_CALL_TYPES: + async with self._character_image_gate: + elapsed = time.monotonic() - self._last_character_image_call + if elapsed < self._character_image_cooldown: + await asyncio.sleep(self._character_image_cooldown - elapsed) + self._last_character_image_call = time.monotonic() + elif req_type in _IMAGE_CALL_TYPES: + async with self._image_gate: + elapsed = time.monotonic() - self._last_image_call + if elapsed < self._image_cooldown: + await asyncio.sleep(self._image_cooldown - elapsed) + self._last_image_call = time.monotonic() def release(self): self._semaphore.release() @@ -59,9 +197,89 @@ class WorkerController: def __init__(self): self._shutdown = asyncio.Event() self._active_ids: set[str] = set() - self._rate_limiter = APIRateLimiter(MAX_CONCURRENT_REQUESTS, API_COOLDOWN) + self._active_types: dict[str, str] = {} + self._rate_limiter = APIRateLimiter( + MAX_CONCURRENT_REQUESTS, + API_COOLDOWN, + IMAGE_API_COOLDOWN, + CHARACTER_IMAGE_API_COOLDOWN, + ) self._deferred: dict[str, float] = {} # rid -> defer_until timestamp self._retry_after: dict[str, float] = {} # rid -> retry_after timestamp + self._group_retry_after: dict[str, float] = {} # group key -> retry_after timestamp + + def _image_safe_mode_active(self, now: float) -> bool: + return 
self._group_retry_after.get("image_safe_mode_until", 0.0) > now + + def _can_schedule(self, req: dict, now: float) -> bool: + req_type = req.get("type", "") + safe_mode = self._image_safe_mode_active(now) + + captcha_pause_until = self._group_retry_after.get("captcha", 0.0) + captcha_cooldown_until = self._group_retry_after.get("captcha_cooldown_until", 0.0) + video_cooldown_until = self._group_retry_after.get("video_cooldown_until", 0.0) + image_pause_until = self._group_retry_after.get("image", 0.0) + image_cooldown_until = self._group_retry_after.get("image_cooldown_until", 0.0) + character_image_cooldown_until = self._group_retry_after.get("character_image_cooldown_until", 0.0) + if req_type in _CAPTCHA_CALL_TYPES: + if req_type in _VIDEO_CALL_TYPES: + if captcha_pause_until > now or video_cooldown_until > now: + return False + elif captcha_pause_until > now or captcha_cooldown_until > now: + return False + captcha_active = sum(1 for t in self._active_types.values() if t in _CAPTCHA_CALL_TYPES) + captcha_non_char_active = sum( + 1 for t in self._active_types.values() + if t in _CAPTCHA_CALL_TYPES and t not in _CHARACTER_IMAGE_CALL_TYPES + ) + max_captcha_concurrency = MAX_CONCURRENT_CAPTCHA_REQUESTS + if req_type in _VIDEO_CALL_TYPES: + max_captcha_concurrency = max( + max_captcha_concurrency, + min(MAX_CONCURRENT_VIDEO_REQUESTS, MAX_CONCURRENT_REQUESTS), + ) + if req_type in _CHARACTER_IMAGE_CALL_TYPES and captcha_non_char_active == 0: + # Ref stage (character/location) can burst slightly faster when no scene jobs are active. 
+ max_captcha_concurrency = max( + max_captcha_concurrency, + min(MAX_CONCURRENT_CHARACTER_REF_REQUESTS, 2), + ) + if safe_mode and req_type in _IMAGE_CALL_TYPES: + max_captcha_concurrency = min(max_captcha_concurrency, 1) + if captcha_active >= max_captcha_concurrency: + return False + + if req_type in _VIDEO_CALL_TYPES: + if req_type in _LOCAL_UPSCALE_CALL_TYPES: + local_upscale_active = sum( + 1 for t in self._active_types.values() if t in _LOCAL_UPSCALE_CALL_TYPES + ) + if local_upscale_active >= max(1, MAX_CONCURRENT_LOCAL_UPSCALE_REQUESTS): + return False + video_active = sum(1 for t in self._active_types.values() if t in _VIDEO_CALL_TYPES) + if video_active >= max(1, MAX_CONCURRENT_VIDEO_REQUESTS): + return False + + if req_type in _CHARACTER_IMAGE_CALL_TYPES: + if image_pause_until > now or character_image_cooldown_until > now: + return False + char_active = sum(1 for t in self._active_types.values() if t in _CHARACTER_IMAGE_CALL_TYPES) + max_char_concurrency = MAX_CONCURRENT_CHARACTER_REF_REQUESTS + if safe_mode: + max_char_concurrency = min(max_char_concurrency, CAPTCHA_SAFE_MODE_IMAGE_CONCURRENCY) + return char_active < max_char_concurrency + if req_type in _IMAGE_CALL_TYPES: + if image_pause_until > now or image_cooldown_until > now: + return False + image_active = sum( + 1 for t in self._active_types.values() + if t in _IMAGE_CALL_TYPES and t not in _CHARACTER_IMAGE_CALL_TYPES + ) + max_image_concurrency = MAX_CONCURRENT_IMAGE_REQUESTS + if safe_mode: + max_image_concurrency = min(max_image_concurrency, CAPTCHA_SAFE_MODE_IMAGE_CONCURRENCY) + return image_active < max_image_concurrency + return True @property def active_count(self) -> int: @@ -88,6 +306,16 @@ async def drain(self, timeout: float = 30.0): async def _cleanup_stale_processing(self): """Reset any requests stuck in PROCESSING state from a previous run.""" try: + migrated = await crud.migrate_upscale_requests_to_local() + if migrated: + logger.info("Migrated %d legacy UPSCALE_VIDEO request(s) 
to UPSCALE_VIDEO_LOCAL", migrated) + stale_upscale = await crud.fail_stale_pending_local_upscale(STALE_PENDING_LOCAL_UPSCALE_TIMEOUT) + if stale_upscale: + logger.warning( + "Stopped %d stale PENDING UPSCALE_VIDEO_LOCAL request(s) on startup (timeout=%ss)", + stale_upscale, + STALE_PENDING_LOCAL_UPSCALE_TIMEOUT, + ) stale = await crud.list_requests(status="PROCESSING") for req in stale: await crud.update_request(req["id"], status="PENDING", @@ -114,7 +342,8 @@ async def _run_loop(self): continue pending = await crud.list_actionable_requests( - exclude_ids=self._active_ids, limit=slots_available + exclude_ids=self._active_ids, + limit=max(25, slots_available * 8), ) pending_count = len(pending) @@ -137,16 +366,51 @@ async def _run_loop(self): if rid in self._active_ids: continue + # Respect stricter image throttle + temporary captcha pause window + if not self._can_schedule(req, now): + continue + # Skip recently deferred (prereq or retry cooldown) if rid in self._deferred and self._deferred[rid] > now: continue self._deferred.pop(rid, None) - # Skip if retry backoff not elapsed + # DB `next_retry_at` is the source of truth for retry scheduling. + # If row is actionable now, clear stale in-memory backoff gate. 
if rid in self._retry_after and self._retry_after[rid] > now: - continue + self._retry_after.pop(rid, None) self._active_ids.add(rid) + self._active_types[rid] = req.get("type", "") + if req.get("type", "") in _IMAGE_CALL_TYPES: + cooldown_key = "image_cooldown_until" + cooldown_sec = IMAGE_API_COOLDOWN + if req.get("type", "") in _CHARACTER_IMAGE_CALL_TYPES: + cooldown_key = "character_image_cooldown_until" + cooldown_sec = CHARACTER_IMAGE_API_COOLDOWN + elif self._image_safe_mode_active(now): + cooldown_sec = max(cooldown_sec, CAPTCHA_SAFE_MODE_IMAGE_COOLDOWN) + if cooldown_sec > 0: + self._group_retry_after[cooldown_key] = max( + self._group_retry_after.get(cooldown_key, 0.0), + now + cooldown_sec, + ) + if req.get("type", "") in _CAPTCHA_CALL_TYPES: + if req.get("type", "") in _VIDEO_CALL_TYPES: + cooldown_key = "video_cooldown_until" + cooldown_sec = VIDEO_API_COOLDOWN + else: + cooldown_key = "captcha_cooldown_until" + cooldown_sec = CAPTCHA_API_COOLDOWN + if req.get("type", "") in _CHARACTER_IMAGE_CALL_TYPES: + cooldown_sec = min(cooldown_sec, CHARACTER_IMAGE_API_COOLDOWN) + if self._image_safe_mode_active(now) and req.get("type", "") in _IMAGE_CALL_TYPES: + cooldown_sec = max(cooldown_sec, CAPTCHA_SAFE_MODE_IMAGE_COOLDOWN) + if cooldown_sec > 0: + self._group_retry_after[cooldown_key] = max( + self._group_retry_after.get(cooldown_key, 0.0), + now + cooldown_sec, + ) slots_available -= 1 asyncio.create_task(self._run_one(req)) @@ -154,6 +418,20 @@ async def _run_loop(self): pending_ids = {r["id"] for r in pending} self._deferred = {k: v for k, v in self._deferred.items() if k in pending_ids} self._retry_after = {k: v for k, v in self._retry_after.items() if k in pending_ids} + if self._group_retry_after.get("image", 0.0) <= now: + self._group_retry_after.pop("image", None) + if self._group_retry_after.get("captcha", 0.0) <= now: + self._group_retry_after.pop("captcha", None) + if self._group_retry_after.get("captcha_cooldown_until", 0.0) <= now: + 
self._group_retry_after.pop("captcha_cooldown_until", None) + if self._group_retry_after.get("video_cooldown_until", 0.0) <= now: + self._group_retry_after.pop("video_cooldown_until", None) + if self._group_retry_after.get("image_cooldown_until", 0.0) <= now: + self._group_retry_after.pop("image_cooldown_until", None) + if self._group_retry_after.get("character_image_cooldown_until", 0.0) <= now: + self._group_retry_after.pop("character_image_cooldown_until", None) + if self._group_retry_after.get("image_safe_mode_until", 0.0) <= now: + self._group_retry_after.pop("image_safe_mode_until", None) except Exception as e: logger.exception("Worker loop error: %s", e) @@ -162,14 +440,16 @@ async def _run_loop(self): async def _run_one(self, req: dict): rid = req["id"] + req_type = req.get("type", "") try: - await self._rate_limiter.acquire() + await self._rate_limiter.acquire(req_type) try: - await _process_one(req, self._deferred, self._retry_after) + await _process_one(req, self._deferred, self._retry_after, self._group_retry_after) finally: self._rate_limiter.release() finally: self._active_ids.discard(rid) + self._active_types.pop(rid, None) async def _prerequisites_met(req: dict, orientation: str) -> bool: @@ -178,7 +458,7 @@ async def _prerequisites_met(req: dict, orientation: str) -> bool: prefix = "vertical" if orientation == "VERTICAL" else "horizontal" # Video gen needs scene image to be ready; upscale needs video to be ready - if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): scene = await crud.get_scene(req.get("scene_id")) if not scene: return True # let _dispatch handle "scene not found" @@ -186,7 +466,7 @@ async def _prerequisites_met(req: dict, orientation: str) -> bool: if not scene.get(f"{prefix}_image_media_id"): logger.info("VIDEO prereq deferred: scene=%s no %s_image_media_id", 
req.get("scene_id","")[:12], prefix) return False - elif req_type == "UPSCALE_VIDEO": + elif req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): if not scene.get(f"{prefix}_video_media_id"): logger.info("UPSCALE prereq deferred: scene=%s no %s_video_media_id", req.get("scene_id","")[:12], prefix) return False @@ -220,16 +500,26 @@ async def _resolve_orientation(req: dict) -> str: """Resolve orientation from request, falling back to video table, then VERTICAL.""" orient = req.get("orientation") if orient: - return orient + return normalize_orientation(orient) vid = req.get("video_id") if vid: video = await crud.get_video(vid) if video and video.get("orientation"): - return video["orientation"] + return normalize_orientation(video["orientation"]) + pid = req.get("project_id") + if pid: + project = await crud.get_project(pid) + if project and project.get("orientation"): + return normalize_orientation(project["orientation"]) return "VERTICAL" -async def _process_one(req: dict, deferred: dict = None, retry_after: dict = None): +async def _process_one( + req: dict, + deferred: dict = None, + retry_after: dict = None, + group_retry_after: dict = None, +): rid, req_type = req["id"], req["type"] orientation = await _resolve_orientation(req) @@ -252,9 +542,10 @@ async def _process_one(req: dict, deferred: dict = None, retry_after: dict = Non elif req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"): skip_kwargs["media_id"] = scene.get(f"{prefix}_video_media_id") skip_kwargs["output_url"] = scene.get(f"{prefix}_video_url") - elif req_type == "UPSCALE_VIDEO": + elif req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): skip_kwargs["media_id"] = scene.get(f"{prefix}_upscale_media_id") skip_kwargs["output_url"] = scene.get(f"{prefix}_upscale_url") + skip_kwargs["next_retry_at"] = None await crud.update_request(rid, **skip_kwargs) return @@ -265,28 +556,131 @@ async def _process_one(req: dict, deferred: dict = None, retry_after: dict = Non return 
logger.info("Processing request %s type=%s", rid[:8], req_type) - await crud.update_request(rid, status="PROCESSING") - await event_bus.emit("request_update", {"id": rid, "status": "PROCESSING", "type": req_type}) + await crud.update_request(rid, status="PROCESSING", next_retry_at=None) + processing_payload = { + "id": rid, + "status": "PROCESSING", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + } + await event_bus.emit("request_update", processing_payload) try: - result = await _dispatch(req, orientation) + dispatch_timeout = REQUEST_DISPATCH_TIMEOUT + if req_type in _VIDEO_CALL_TYPES: + dispatch_timeout = max(REQUEST_DISPATCH_TIMEOUT, VIDEO_POLL_TIMEOUT + 60) + if req_type in _LOCAL_UPSCALE_CALL_TYPES: + from agent.services.local_upscaler import local_upscale_dispatch_timeout_sec + + dispatch_timeout = max(dispatch_timeout, local_upscale_dispatch_timeout_sec()) + + result = await asyncio.wait_for(_dispatch(req, orientation), timeout=dispatch_timeout) + if isinstance(result, dict) and result.get("pending") is True: + retry_after_sec_raw = result.get("retry_after_sec", 8) + try: + retry_after_sec = max(3, int(float(retry_after_sec_raw))) + except Exception: + retry_after_sec = 8 + pending_message = str(result.get("message") or "Video operation pending") + await crud.update_request( + rid, + status="PENDING", + error_message=pending_message, + next_retry_at=_iso_after(retry_after_sec), + ) + pending_payload = { + "id": rid, + "status": "PENDING", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "message": pending_message, + "pending": True, + "next_retry_in_sec": retry_after_sec, + } + await event_bus.emit("request_update", pending_payload) + return if _is_error(result): - await _handle_failure(rid, req, result, 
retry_after) + failed_payload = { + "id": rid, + "status": "FAILED", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "error": result.get("error") or result.get("data"), + } + await event_bus.emit("request_update", failed_payload) + await event_bus.emit("request_failed", failed_payload) + await _handle_failure(rid, req, result, retry_after, group_retry_after) else: gen_result = parse_result(result, req_type) - await crud.update_request(rid, status="COMPLETED", media_id=gen_result.media_id, output_url=gen_result.url) + await crud.update_request( + rid, + status="COMPLETED", + media_id=gen_result.media_id, + output_url=gen_result.url, + next_retry_at=None, + ) if req_type in ("GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"): char_id = req.get("character_id") if char_id: await apply_character_result(char_id, gen_result) else: await apply_scene_result(req.get("scene_id"), req_type, orientation, gen_result) - await event_bus.emit("request_update", {"id": rid, "status": "COMPLETED"}) + completed_payload = { + "id": rid, + "status": "COMPLETED", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "media_id": gen_result.media_id, + "output_url": gen_result.url, + } + await event_bus.emit("request_update", completed_payload) + # Backward-compatible aliases for older UI listeners. 
+ await event_bus.emit("request_completed", completed_payload) logger.info("Request %s COMPLETED: media=%s", rid[:8], gen_result.media_id[:20] if gen_result.media_id else "?") + except asyncio.TimeoutError: + timeout_msg = f"Dispatch timeout after {dispatch_timeout}s ({req_type})" + logger.error("Request %s timeout: %s", rid[:8], timeout_msg) + failed_payload = { + "id": rid, + "status": "FAILED", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "error": timeout_msg, + } + await event_bus.emit("request_update", failed_payload) + await event_bus.emit("request_failed", failed_payload) + await _handle_failure(rid, req, {"error": timeout_msg}, retry_after, group_retry_after) except Exception as e: logger.exception("Request %s exception: %s", rid[:8], e) - await event_bus.emit("request_update", {"id": rid, "status": "FAILED", "error": str(e)}) - await _handle_failure(rid, req, {"error": str(e)}, retry_after) + failed_payload = { + "id": rid, + "status": "FAILED", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "error": str(e), + } + await event_bus.emit("request_update", failed_payload) + # Backward-compatible aliases for older UI listeners. 
+ await event_bus.emit("request_failed", failed_payload) + await _handle_failure(rid, req, {"error": str(e)}, retry_after, group_retry_after) async def _dispatch(req: dict, orientation: str) -> dict: @@ -298,7 +692,7 @@ async def _dispatch(req: dict, orientation: str) -> dict: # Scene-based operations if req_type in ("GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE", - "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + "GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): scene = await crud.get_scene(req.get("scene_id")) if not scene: return {"error": "Scene not found"} @@ -312,8 +706,14 @@ async def _dispatch(req: dict, orientation: str) -> dict: return await ops.generate_scene_video(scene, orientation, request_id=rid) if req_type == "GENERATE_VIDEO_REFS": return await ops.generate_scene_video_refs(scene, orientation, request_id=rid) - if req_type == "UPSCALE_VIDEO": - return await ops.upscale_scene_video(scene, orientation, request_id=rid) + if req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): + from agent.services.local_upscaler import upscale_scene_video_local + + return await upscale_scene_video_local( + scene, + orientation, + project_id=pid, + ) # Character operations if req_type in ("GENERATE_CHARACTER_IMAGE", "REGENERATE_CHARACTER_IMAGE", "EDIT_CHARACTER_IMAGE"): @@ -381,7 +781,7 @@ async def _recover_entity_not_found(req: dict) -> bool: prefix = "vertical" if orientation == "VERTICAL" else "horizontal" # Scene-based requests: re-upload scene image - if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO"): + if req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS", "UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): scene = await crud.get_scene(req.get("scene_id")) if not scene: return False @@ -411,7 +811,16 @@ async def _recover_entity_not_found(req: dict) -> bool: return False -async def _handle_failure(rid: str, req: dict, 
result: dict, retry_after: dict = None): +async def _handle_failure( + rid: str, + req: dict, + result: dict, + retry_after: dict = None, + group_retry_after: dict = None, +): + if retry_after is None: + retry_after = _retry_state + error_msg = result.get("error") if not error_msg: data = result.get("data", {}) @@ -433,50 +842,187 @@ async def _handle_failure(rid: str, req: dict, result: dict, retry_after: dict = error_msg = "Unknown error" if isinstance(error_msg, dict): error_msg = json.dumps(error_msg)[:200] + if not str(error_msg).strip(): + error_msg = "Unknown error" + + # Reconcile stale state: operation may have completed but app request got a transient + # poll/read mismatch. If we can confirm SUCCESS from check-status, mark COMPLETED. + if await _try_reconcile_operation_success(rid, req, error_msg): + return # Auto-recover expired media by re-uploading if "not found" in str(error_msg).lower(): recovered = await _recover_entity_not_found(req) if recovered: logger.info("Request %s: recovered expired media, retrying", rid[:8]) - await crud.update_request(rid, status="PENDING", error_message=f"recovered: {error_msg}") + await crud.update_request(rid, status="PENDING", error_message=f"recovered: {error_msg}", next_retry_at=None) return error_lower = str(error_msg).lower() # WS transient errors (extension disconnect/reconnect): retry without incrementing count if "extension reconnected" in error_lower or "extension disconnected" in error_lower or "extension not connected" in error_lower: - await crud.update_request(rid, status="PENDING", error_message=str(error_msg)) + await crud.update_request(rid, status="PENDING", error_message=str(error_msg), next_retry_at=None) logger.info("Request %s transient WS error, will retry (no retry increment): %s", rid[:8], error_msg) return - # reCAPTCHA errors: retry up to 10 times — deferred dict in main loop handles delay + # Flow tab/runtime unavailable: don't burn captcha retry budget. 
+ if _is_flow_tab_unavailable_error(error_lower): + delay = max(15, CAPTCHA_CONTENT_TIMEOUT_PAUSE_SEC // 2) + if retry_after is not None: + retry_after[rid] = time.time() + delay + if group_retry_after is not None and req.get("type", "") in _CAPTCHA_CALL_TYPES: + pause_until = time.time() + max(delay, CAPTCHA_GROUP_PAUSE_SEC) + group_retry_after["captcha"] = max(group_retry_after.get("captcha", 0.0), pause_until) + if req.get("type", "") in _IMAGE_CALL_TYPES: + group_retry_after["image"] = max(group_retry_after.get("image", 0.0), pause_until) + await crud.update_request( + rid, + status="PENDING", + error_message=str(error_msg), + next_retry_at=_iso_after(delay), + ) + try: + # Trigger extension warm-up (open/refresh Flow tab + token) opportunistically. + await get_flow_client().refresh_token() + except Exception: + pass + logger.warning( + "Request %s Flow tab unavailable, deferred %ss without increasing retry_count: %s", + rid[:8], delay, error_msg + ) + return + + # reCAPTCHA errors: exponential backoff + temporary pause for all captcha-consuming requests if "captcha" in error_lower or "recaptcha" in error_lower: retry = req.get("retry_count", 0) + 1 - if retry < 10: - await crud.update_request(rid, status="PENDING", retry_count=retry, error_message=str(error_msg)) - logger.warning("Request %s reCAPTCHA failed (retry %d/10), will retry", rid[:8], retry) + if retry < CAPTCHA_RETRY_LIMIT: + delay = int(min(CAPTCHA_RETRY_BACKOFF_BASE * (1.6 ** (retry - 1)), CAPTCHA_RETRY_BACKOFF_MAX)) + is_traffic = _is_unusual_traffic_error(error_lower) + is_timeout = _is_captcha_timeout_error(error_lower) + if is_timeout: + delay = max(delay, CAPTCHA_CONTENT_TIMEOUT_PAUSE_SEC) + if is_traffic: + delay = max(delay, CAPTCHA_TRAFFIC_PAUSE_SEC) + until = time.time() + delay + if retry_after is not None: + retry_after[rid] = until + if group_retry_after is not None and req.get("type", "") in _CAPTCHA_CALL_TYPES: + group_pause_until = time.time() + max(delay, CAPTCHA_GROUP_PAUSE_SEC) + 
group_retry_after["captcha"] = max(group_retry_after.get("captcha", 0.0), group_pause_until) + if req.get("type", "") in _IMAGE_CALL_TYPES: + group_retry_after["image"] = max(group_retry_after.get("image", 0.0), group_pause_until) + if is_traffic: + safe_until = time.time() + max(delay, CAPTCHA_SAFE_MODE_SEC) + group_retry_after["captcha"] = max(group_retry_after.get("captcha", 0.0), safe_until) + group_retry_after["captcha_cooldown_until"] = max( + group_retry_after.get("captcha_cooldown_until", 0.0), + safe_until, + ) + if req.get("type", "") in _IMAGE_CALL_TYPES: + group_retry_after["image_safe_mode_until"] = max( + group_retry_after.get("image_safe_mode_until", 0.0), + safe_until, + ) + await crud.update_request( + rid, + status="PENDING", + retry_count=retry, + error_message=str(error_msg), + next_retry_at=_iso_after(delay), + ) + if retry <= 2: + try: + await get_flow_client().refresh_token() + except Exception: + pass + logger.warning( + "Request %s reCAPTCHA failed (retry %d/%d), backoff=%ds, traffic=%s timeout=%s", + rid[:8], retry, CAPTCHA_RETRY_LIMIT - 1, delay, is_traffic, is_timeout + ) return else: - await crud.update_request(rid, status="FAILED", error_message=str(error_msg)) + await crud.update_request(rid, status="FAILED", error_message=str(error_msg), next_retry_at=None) await _mark_scene_failed(req) - logger.error("Request %s FAILED after 10 reCAPTCHA retries: %s", rid[:8], error_msg) + logger.error( + "Request %s FAILED after %d reCAPTCHA retries: %s", + rid[:8], CAPTCHA_RETRY_LIMIT - 1, error_msg + ) + return + + # Safety-filter blocks are usually deterministic for the same prompt. + # We already do one auto-safe prompt retry in OperationService; if it still fails, + # fail fast with a clear hint instead of burning the generic retry budget. + if _is_unsafe_generation_error(error_lower): + msg = ( + "Google Flow chan boi bo loc an toan (PUBLIC_ERROR_UNSAFE_GENERATION). " + "He thong da thu auto-safe prompt nhung van bi chan. 
" + "Hay giam noi dung nhay cam/bao luc/18+/thu ghet va tao lai." + ) + await crud.update_request(rid, status="FAILED", error_message=msg, next_retry_at=None) + await _mark_scene_failed(req) + logger.warning("Request %s FAILED by safety filter: %s", rid[:8], error_msg) + return + + if _LOCAL_UPSCALE_SETUP_MARKER in error_lower: + await crud.update_request(rid, status="FAILED", error_message=str(error_msg), next_retry_at=None) + await _mark_scene_failed(req) + logger.error("Request %s local upscale setup missing: %s", rid[:8], error_msg) + return + + # Operation failed with operation-id is often a transient bridge/poll mismatch. + # Retry with a calmer backoff before marking FAILED. + if _extract_operation_name_from_error(error_msg): + retry = req.get("retry_count", 0) + 1 + if retry < max(MAX_RETRIES, 8): + delay = min(OPERATION_FAILED_RETRY_BASE_SEC * retry, 600) + if retry_after is not None: + retry_after[rid] = time.time() + delay + await crud.update_request( + rid, + status="PENDING", + retry_count=retry, + error_message=str(error_msg), + next_retry_at=_iso_after(delay), + ) + logger.warning( + "Request %s operation-failed transient (retry %d), defer=%ss: %s", + rid[:8], retry, delay, error_msg + ) return + if req.get("type") in _LOCAL_UPSCALE_CALL_TYPES and "dispatch timeout" in error_lower: + await crud.update_request( + rid, + status="FAILED", + error_message=str(error_msg), + next_retry_at=None, + ) + await _mark_scene_failed(req) + logger.error("Request %s local upscale dispatch-timeout => FAILED: %s", rid[:8], error_msg) + return + retry = req.get("retry_count", 0) + 1 if retry < MAX_RETRIES: now = time.time() + delay = min(2 ** retry * 10, 300) if retry_after is not None: ra = retry_after.get(rid, 0.0) if ra > now: # Still in backoff — reset to PENDING so it's not stuck in PROCESSING - await crud.update_request(rid, status="PENDING", error_message=str(error_msg)) + await crud.update_request(rid, status="PENDING", error_message=str(error_msg), 
next_retry_at=_iso_after(ra - now)) return - retry_after[rid] = now + min(2 ** retry * 10, 300) - await crud.update_request(rid, status="PENDING", retry_count=retry, error_message=str(error_msg)) + retry_after[rid] = now + delay + await crud.update_request( + rid, + status="PENDING", + retry_count=retry, + error_message=str(error_msg), + next_retry_at=_iso_after(delay), + ) logger.warning("Request %s failed (retry %d/%d): %s", rid[:8], retry, MAX_RETRIES, error_msg) else: - await crud.update_request(rid, status="FAILED", error_message=str(error_msg)) + await crud.update_request(rid, status="FAILED", error_message=str(error_msg), next_retry_at=None) await _mark_scene_failed(req) logger.error("Request %s FAILED permanently: %s", rid[:8], error_msg) @@ -488,21 +1034,127 @@ async def _mark_scene_failed(req: dict): orientation = await _resolve_orientation(req) prefix = "vertical" if orientation == "VERTICAL" else "horizontal" req_type = req["type"] + + scene = None + try: + maybe_scene = crud.get_scene(scene_id) + if asyncio.iscoroutine(maybe_scene): + scene = await maybe_scene + else: + scene = maybe_scene + except TypeError: + # Tests may patch crud as non-async MagicMock without get_scene awaitable. + scene = None + + if scene: + # Do not downgrade a stage that already has a completed media result. 
+ if req_type in ("GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE"): + if scene.get(f"{prefix}_image_status") == "COMPLETED" and scene.get(f"{prefix}_image_media_id"): + logger.info("Skip marking image FAILED for scene %s: already COMPLETED with media", scene_id[:12]) + return + elif req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"): + if scene.get(f"{prefix}_video_status") == "COMPLETED" and scene.get(f"{prefix}_video_media_id"): + logger.info("Skip marking video FAILED for scene %s: already COMPLETED with media", scene_id[:12]) + return + elif req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): + if scene.get(f"{prefix}_upscale_status") == "COMPLETED" and ( + scene.get(f"{prefix}_upscale_media_id") or scene.get(f"{prefix}_upscale_url") + ): + logger.info("Skip marking upscale FAILED for scene %s: already COMPLETED with media", scene_id[:12]) + return + updates = {} if req_type in ("GENERATE_IMAGE", "REGENERATE_IMAGE", "EDIT_IMAGE"): updates[f"{prefix}_image_status"] = "FAILED" elif req_type in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"): updates[f"{prefix}_video_status"] = "FAILED" - elif req_type == "UPSCALE_VIDEO": + elif req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): updates[f"{prefix}_upscale_status"] = "FAILED" if updates: await crud.update_scene(scene_id, **updates) +def _extract_operation_name_from_error(error_msg: str | None) -> str | None: + if not error_msg: + return None + m = _OP_NAME_RE.search(str(error_msg)) + if not m: + return None + return m.group(1) + + +async def _try_reconcile_operation_success(rid: str, req: dict, error_msg: str | None) -> bool: + """If request has an operation id, re-check it once and recover COMPLETED state.""" + req_type = req.get("type", "") + if req_type not in ("GENERATE_VIDEO", "REGENERATE_VIDEO", "GENERATE_VIDEO_REFS"): + return False + + req_row = await crud.get_request(rid) + op_name = (req_row or {}).get("request_id") or req.get("request_id") or 
_extract_operation_name_from_error(error_msg) + if not op_name: + return False + + client = get_flow_client() + if not client.connected: + return False + + status_result = await client.check_video_status([{"operation": {"name": op_name}}]) + if _is_error(status_result): + return False + + data = status_result.get("data", status_result) + ops = data.get("operations", []) if isinstance(data, dict) else [] + if not ops: + return False + + if ops[0].get("status") != "MEDIA_GENERATION_STATUS_SUCCESSFUL": + return False + + gen_result = parse_result(status_result, req_type) + if not gen_result.success: + return False + + await crud.update_request( + rid, + status="COMPLETED", + request_id=op_name, + media_id=gen_result.media_id, + output_url=gen_result.url, + error_message=f"reconciled after transient failure: {error_msg or 'unknown'}", + next_retry_at=None, + ) + + if req.get("scene_id"): + orientation = await _resolve_orientation(req) + await apply_scene_result(req.get("scene_id"), req_type, orientation, gen_result) + + payload = { + "id": rid, + "status": "COMPLETED", + "type": req_type, + "project_id": req.get("project_id"), + "video_id": req.get("video_id"), + "scene_id": req.get("scene_id"), + "character_id": req.get("character_id"), + "media_id": gen_result.media_id, + "output_url": gen_result.url, + } + await event_bus.emit("request_update", payload) + await event_bus.emit("request_completed", payload) + logger.info("Reconciled request %s to COMPLETED via operation status: %s", rid[:8], op_name) + return True + + async def _is_already_completed(req: dict, orientation: str) -> bool: scene_id = req.get("scene_id") req_type = req.get("type", "") - if not scene_id or req_type == "GENERATE_CHARACTER_IMAGE": + if req_type == "GENERATE_CHARACTER_IMAGE": + char_id = req.get("character_id") + if not char_id: + return False + char = await crud.get_character(char_id) + return bool(char and char.get("media_id")) + if not scene_id: return False scene = await 
crud.get_scene(scene_id) if not scene: @@ -514,7 +1166,7 @@ async def _is_already_completed(req: dict, orientation: str) -> bool: return scene.get(f"{prefix}_image_status") == "COMPLETED" if req_type in ("GENERATE_VIDEO", "GENERATE_VIDEO_REFS"): return scene.get(f"{prefix}_video_status") == "COMPLETED" - if req_type == "UPSCALE_VIDEO": + if req_type in ("UPSCALE_VIDEO", "UPSCALE_VIDEO_LOCAL"): return scene.get(f"{prefix}_upscale_status") == "COMPLETED" return False diff --git a/build-agent.sh b/build-agent.sh new file mode 100755 index 0000000..cb9f35c --- /dev/null +++ b/build-agent.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# build-agent.sh — Bundle the FlowKit Python agent into a standalone binary via PyInstaller +# Run this on each target OS (macOS, Windows) to produce the OS-specific binary. +# The output binary is placed in desktop/resources/agent/flowkit-agent[.exe] +# ───────────────────────────────────────────────────────────────────────────── +set -euo pipefail + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +VENV="$ROOT/venv" +OUTPUT="$ROOT/desktop/resources/agent" + +echo "==> Activating venv..." +source "$VENV/bin/activate" + +echo "==> Installing PyInstaller..." +pip install pyinstaller --quiet + +echo "==> Bundling agent..." +pyinstaller \ + --noconfirm \ + --onefile \ + --name flowkit-agent \ + --distpath "$OUTPUT" \ + --workpath /tmp/flowkit-build \ + --specpath /tmp/flowkit-spec \ + --add-data "$ROOT/agent:agent" \ + --hidden-import uvicorn \ + --hidden-import uvicorn.logging \ + --hidden-import uvicorn.loops \ + --hidden-import uvicorn.loops.auto \ + --hidden-import uvicorn.protocols \ + --hidden-import uvicorn.protocols.http \ + --hidden-import uvicorn.protocols.http.auto \ + --hidden-import uvicorn.protocols.websockets \ + --hidden-import uvicorn.protocols.websockets.auto \ + --hidden-import uvicorn.lifespan \ + --hidden-import uvicorn.lifespan.on \ + --hidden-import websockets \ + "$ROOT/agent/main.py" + +echo "==> Done! 
Binary at: $OUTPUT/flowkit-agent" +ls -lh "$OUTPUT/" diff --git a/cloudflare-license/migrations/0001_init.sql b/cloudflare-license/migrations/0001_init.sql new file mode 100644 index 0000000..dc0a93e --- /dev/null +++ b/cloudflare-license/migrations/0001_init.sql @@ -0,0 +1,39 @@ +CREATE TABLE IF NOT EXISTS devices ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + machine_hash TEXT NOT NULL UNIQUE, + machine_hint TEXT NOT NULL, + first_seen TEXT NOT NULL, + last_seen TEXT NOT NULL, + last_app_version TEXT, + last_platform TEXT +); + +CREATE INDEX IF NOT EXISTS idx_devices_last_seen ON devices(last_seen DESC); + +CREATE TABLE IF NOT EXISTS licenses ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + machine_hash TEXT NOT NULL, + plan_code TEXT NOT NULL, + status TEXT NOT NULL, + activated_at TEXT NOT NULL, + expires_at TEXT, + created_by TEXT NOT NULL DEFAULT 'admin', + note TEXT, + revoked_at TEXT, + revoked_reason TEXT +); + +CREATE INDEX IF NOT EXISTS idx_licenses_machine_hash ON licenses(machine_hash); +CREATE INDEX IF NOT EXISTS idx_licenses_status ON licenses(status); +CREATE INDEX IF NOT EXISTS idx_licenses_activated_at ON licenses(activated_at DESC); + +CREATE TABLE IF NOT EXISTS audit_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + actor TEXT NOT NULL, + action TEXT NOT NULL, + machine_hash TEXT, + detail_json TEXT, + created_at TEXT NOT NULL +); + +CREATE INDEX IF NOT EXISTS idx_audit_logs_created_at ON audit_logs(created_at DESC); diff --git a/cloudflare-license/migrations/0002_add_machine_id.sql b/cloudflare-license/migrations/0002_add_machine_id.sql new file mode 100644 index 0000000..b7f251a --- /dev/null +++ b/cloudflare-license/migrations/0002_add_machine_id.sql @@ -0,0 +1,3 @@ +ALTER TABLE devices ADD COLUMN machine_id TEXT; + +CREATE INDEX IF NOT EXISTS idx_devices_machine_id ON devices(machine_id); diff --git a/cloudflare-license/package-lock.json b/cloudflare-license/package-lock.json new file mode 100644 index 0000000..5a558cd --- /dev/null +++ 
b/cloudflare-license/package-lock.json @@ -0,0 +1,1529 @@ +{ + "name": "flowkit-license-worker", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "flowkit-license-worker", + "version": "0.1.0", + "devDependencies": { + "@cloudflare/workers-types": "^4.20260415.0", + "typescript": "^5.6.3", + "wrangler": "^4.13.2" + } + }, + "node_modules/@cloudflare/kv-asset-handler": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.2.tgz", + "integrity": "sha512-SIOD2DxrRRwQ+jgzlXCqoEFiKOFqaPjhnNTGKXSRLvp1HiOvapLaFG2kEr9dYQTYe8rKrd9uvDUzmAITeNyaHQ==", + "dev": true, + "license": "MIT OR Apache-2.0", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@cloudflare/unenv-preset": { + "version": "2.16.0", + "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.16.0.tgz", + "integrity": "sha512-8ovsRpwzPoEqPUzoErAYVv8l3FMZNeBVQfJTvtzP4AgLSRGZISRfuChFxHWUQd3n6cnrwkuTGxT+2cGo8EsyYg==", + "dev": true, + "license": "MIT OR Apache-2.0", + "peerDependencies": { + "unenv": "2.0.0-rc.24", + "workerd": "1.20260301.1 || ~1.20260302.1 || ~1.20260303.1 || ~1.20260304.1 || >1.20260305.0 <2.0.0-0" + }, + "peerDependenciesMeta": { + "workerd": { + "optional": true + } + } + }, + "node_modules/@cloudflare/workerd-darwin-64": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20260420.1.tgz", + "integrity": "sha512-Y6HtAY+pS5INiD9HyO1JvvujZO24mD3eqRwPZlLXBkcT+wW8bTOve/8mVKErEzEtZ5LkuT3tJqG9py8TxQEBgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=16" + } + }, + "node_modules/@cloudflare/workerd-darwin-arm64": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20260420.1.tgz", + "integrity": 
"sha512-7aiRtZTc5S4aKcL6uIx+B3tCzb/bULjQmE67/03k0HtaDNzP20GnYmYpFCqleFqsdmIb4Tx8PkKPmsXI3AJLvQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=16" + } + }, + "node_modules/@cloudflare/workerd-linux-64": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20260420.1.tgz", + "integrity": "sha512-J/DW149FPmug1wSM32zBF7My14xg+inIYwzS4bSAxyXR6tBiTxbhgFWQQz99nt08ZMstdKHRD6f6C/KQaleQcA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=16" + } + }, + "node_modules/@cloudflare/workerd-linux-arm64": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20260420.1.tgz", + "integrity": "sha512-a5I147McRM/L4YHu9EwOsoAyIExZndPRQoLx/33dbw/yUEnO825gvn5QZkCGXBVL2JwsPAyowB0Xliqrj+71Sw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=16" + } + }, + "node_modules/@cloudflare/workerd-windows-64": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20260420.1.tgz", + "integrity": "sha512-ZrHqlHbJNU8P24EAOBaZ6B44G9P+po2z0DBwbAr8965aWR+vohy3cfmgE9uzNPAQfKNmvq7fmc4VwsRpERkg0w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=16" + } + }, + "node_modules/@cloudflare/workers-types": { + "version": "4.20260421.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20260421.1.tgz", + "integrity": "sha512-PJjuz1zwwa+/WP9dkf5ORMQWL7u2m1d8aFUhG3dx6ohweGd+zMppT1JG0zhc00LUg8gFXVaiZzZ5w/0Cp4HI+g==", + "dev": true, + "license": "MIT OR Apache-2.0", + "peer": true + }, + 
"node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], 
+ "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": 
"sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": 
{ + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@img/colour": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.1.0.tgz", + "integrity": "sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@img/sharp-darwin-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + 
"cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-ppc64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.4.tgz", + "integrity": "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-riscv64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-riscv64/-/sharp-libvips-linux-riscv64-1.2.4.tgz", + "integrity": "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==", + "cpu": [ + "riscv64" + ], + "dev": true, + 
"license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", 
+ "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-ppc64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.5.tgz", + "integrity": "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-ppc64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-riscv64": { + "version": "0.34.5", + "resolved": 
"https://registry.npmjs.org/@img/sharp-linux-riscv64/-/sharp-linux-riscv64-0.34.5.tgz", + "integrity": "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-riscv64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": 
"sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.7.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + 
"os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@poppinss/colors": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@poppinss/colors/-/colors-4.1.6.tgz", + "integrity": "sha512-H9xkIdFswbS8n1d6vmRd8+c10t2Qe+rZITbbDHHkQixH5+2x1FDGmi/0K+WgWiqQFKPSlIYB7jlH6Kpfn6Fleg==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^4.1.5" + } + }, + "node_modules/@poppinss/dumper": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/@poppinss/dumper/-/dumper-0.6.5.tgz", + "integrity": "sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@poppinss/colors": "^4.1.5", + "@sindresorhus/is": "^7.0.2", + "supports-color": "^10.0.0" + } + }, + "node_modules/@poppinss/exception": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@poppinss/exception/-/exception-1.2.3.tgz", + "integrity": "sha512-dCED+QRChTVatE9ibtoaxc+WkdzOSjYTKi/+uacHWIsfodVfpsueo3+DKpgU5Px8qXjgmXkSvhXvSCz3fnP9lw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sindresorhus/is": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-7.2.0.tgz", + "integrity": "sha512-P1Cz1dWaFfR4IR+U13mqqiGsLFf1KbayybWwdd2vfctdV6hDpUkgCY0nKOLLTMSoRd/jJNjtbqzf13K8DCCXQw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@speed-highlight/core": { + "version": "1.2.15", + "resolved": "https://registry.npmjs.org/@speed-highlight/core/-/core-1.2.15.tgz", + "integrity": 
"sha512-BMq1K3DsElxDWawkX6eLg9+CKJrTVGCBAWVuHXVUV2u0s2711qiChLSId6ikYPfxhdYocLNt3wWwSvDiTvFabw==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/blake3-wasm": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/blake3-wasm/-/blake3-wasm-2.1.5.tgz", + "integrity": "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/error-stack-parser-es": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/error-stack-parser-es/-/error-stack-parser-es-1.0.5.tgz", + "integrity": "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + 
"@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/miniflare": { + "version": "4.20260420.0", + "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20260420.0.tgz", + "integrity": "sha512-w8s3eh2W7EEsFh2uGdddZLkbTwiPI8MCSMXKtuLSA9btW8xmQsVVSkrFuLXFyTKcX0QkstS5dhcWjQPQRJ2WKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "0.8.1", + 
"sharp": "^0.34.5", + "undici": "7.24.8", + "workerd": "1.20260420.1", + "ws": "8.18.0", + "youch": "4.1.0-beta.10" + }, + "bin": { + "miniflare": "bootstrap.js" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/path-to-regexp": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sharp": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@img/colour": "^1.0.0", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + "@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + 
"@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + "@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5" + } + }, + "node_modules/supports-color": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz", + "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.24.8.tgz", + "integrity": 
"sha512-6KQ/+QxK49Z/p3HO6E5ZCZWNnCasyZLa5ExaVYyvPxUwKtbCPMKELJOqh7EqOle0t9cH/7d2TaaTRRa6Nhs4YQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.18.1" + } + }, + "node_modules/unenv": { + "version": "2.0.0-rc.24", + "resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.24.tgz", + "integrity": "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "pathe": "^2.0.3" + } + }, + "node_modules/workerd": { + "version": "1.20260420.1", + "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20260420.1.tgz", + "integrity": "sha512-1AOJgng169u4fiFrEd5WjrAGpdwd3A4ZJtP8PMvf+RF9NUKy+mdwrKdz4qPZ6Tt/Bya99vsLn6UX33fjAEVoaA==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "bin": { + "workerd": "bin/workerd" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "@cloudflare/workerd-darwin-64": "1.20260420.1", + "@cloudflare/workerd-darwin-arm64": "1.20260420.1", + "@cloudflare/workerd-linux-64": "1.20260420.1", + "@cloudflare/workerd-linux-arm64": "1.20260420.1", + "@cloudflare/workerd-windows-64": "1.20260420.1" + } + }, + "node_modules/wrangler": { + "version": "4.84.0", + "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.84.0.tgz", + "integrity": "sha512-lYScYXeHZ385rDzbTF7QfP4FWu2vQuD7uDQRUjDZuutyq5fZVCR6ZxLLsySbqFiFjvKsF5RoxVPeJtI78blz4w==", + "dev": true, + "license": "MIT OR Apache-2.0", + "dependencies": { + "@cloudflare/kv-asset-handler": "0.4.2", + "@cloudflare/unenv-preset": "2.16.0", + "blake3-wasm": "2.1.5", + "esbuild": "0.27.3", + "miniflare": "4.20260420.0", + "path-to-regexp": "6.3.0", + "unenv": "2.0.0-rc.24", + "workerd": "1.20260420.1" + }, + "bin": { + "wrangler": "bin/wrangler.js", + "wrangler2": "bin/wrangler.js" + }, + "engines": { + "node": ">=20.3.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + 
"@cloudflare/workers-types": "^4.20260420.1" + }, + "peerDependenciesMeta": { + "@cloudflare/workers-types": { + "optional": true + } + } + }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/youch": { + "version": "4.1.0-beta.10", + "resolved": "https://registry.npmjs.org/youch/-/youch-4.1.0-beta.10.tgz", + "integrity": "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@poppinss/colors": "^4.1.5", + "@poppinss/dumper": "^0.6.4", + "@speed-highlight/core": "^1.2.7", + "cookie": "^1.0.2", + "youch-core": "^0.3.3" + } + }, + "node_modules/youch-core": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/youch-core/-/youch-core-0.3.3.tgz", + "integrity": "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@poppinss/exception": "^1.2.2", + "error-stack-parser-es": "^1.0.5" + } + } + } +} diff --git a/cloudflare-license/package.json b/cloudflare-license/package.json new file mode 100644 index 0000000..d18e542 --- /dev/null +++ b/cloudflare-license/package.json @@ -0,0 +1,17 @@ +{ + "name": "flowkit-license-worker", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "dev": "wrangler dev", + "deploy": "wrangler deploy", + "d1:migrate:local": "wrangler d1 migrations apply flowkit_license --local", + "d1:migrate:remote": "wrangler d1 migrations apply 
flowkit_license --remote" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20260415.0", + "typescript": "^5.6.3", + "wrangler": "^4.13.2" + } +} diff --git a/cloudflare-license/src/index.ts b/cloudflare-license/src/index.ts new file mode 100644 index 0000000..afb5604 --- /dev/null +++ b/cloudflare-license/src/index.ts @@ -0,0 +1,1273 @@ +interface Env { + LICENSE_DB: D1Database + ADMIN_TOKEN?: string + ADMIN_USERNAME?: string + ADMIN_PASSWORD?: string + SESSION_SECRET?: string + CORS_ORIGIN?: string +} + +type PlanCode = 'TRIAL_3D' | '1M' | '3M' | '6M' | '1Y' | 'LIFE' + +interface LicenseRow { + id: number + machine_hash: string + plan_code: PlanCode + status: string + activated_at: string + expires_at: string | null + note: string | null + revoked_at: string | null + revoked_reason: string | null +} + +const JSON_CONTENT_TYPE = 'application/json; charset=utf-8' +const SHA256_RE = /^[a-f0-9]{64}$/i +const SESSION_COOKIE_NAME = 'flowkit_admin_session' +const SESSION_TTL_SECONDS = 60 * 60 * 12 +const textEncoder = new TextEncoder() + +const PLAN_ALIAS: Record = { + 'TRIAL_3D': 'TRIAL_3D', + 'TRIAL': 'TRIAL_3D', + 'TRIAL3D': 'TRIAL_3D', + '3D': 'TRIAL_3D', + '3DAY': 'TRIAL_3D', + '3DAYS': 'TRIAL_3D', + 'TRIAL_3_DAYS': 'TRIAL_3D', + 'TRIAL-3DAYS': 'TRIAL_3D', + '1M': '1M', + '1_MONTH': '1M', + 'MONTH_1': '1M', + '3M': '3M', + '3_MONTH': '3M', + 'MONTH_3': '3M', + '6M': '6M', + '6_MONTH': '6M', + 'MONTH_6': '6M', + '1Y': '1Y', + '12M': '1Y', + '1_YEAR': '1Y', + 'YEAR_1': '1Y', + 'LIFE': 'LIFE', + 'LIFETIME': 'LIFE', + 'FOREVER': 'LIFE', +} + +const PLAN_LABEL: Record = { + 'TRIAL_3D': 'Trial - 3 ngày', + '1M': '1 tháng', + '3M': '3 tháng', + '6M': '6 tháng', + '1Y': '1 năm', + 'LIFE': 'Trọn đời', +} + +function nowIso(): string { + return new Date().toISOString() +} + +function normalizeMachineId(value: unknown): string { + if (typeof value !== 'string') return '' + return value.trim().toUpperCase().replace(/\s+/g, '') +} + +function normalizePlan(value: 
unknown): PlanCode | null { + if (typeof value !== 'string') return null + const key = value.trim().toUpperCase() + return PLAN_ALIAS[key] ?? null +} + +function machineHint(machineId: string): string { + if (!machineId) return 'UNKNOWN' + if (machineId.length <= 12) return machineId + return `${machineId.slice(0, 8)}...${machineId.slice(-6)}` +} + +function planExpiresAt(plan: PlanCode, activatedAt: string): string | null { + if (plan === 'LIFE') return null + if (plan === 'TRIAL_3D') { + const trialEnd = new Date(activatedAt) + trialEnd.setUTCDate(trialEnd.getUTCDate() + 3) + return trialEnd.toISOString() + } + const months = plan === '1M' ? 1 : plan === '3M' ? 3 : plan === '6M' ? 6 : 12 + const base = new Date(activatedAt) + const originalDay = base.getUTCDate() + base.setUTCMonth(base.getUTCMonth() + months) + if (base.getUTCDate() < originalDay) { + base.setUTCDate(0) + } + return base.toISOString() +} + +function resolveCorsOrigin(req: Request, env: Env): string { + const configured = (env.CORS_ORIGIN ?? '*').trim() + if (!configured || configured === '*') return '*' + const allowed = configured.split(',').map((v) => v.trim()).filter(Boolean) + const requestOrigin = req.headers.get('Origin') ?? '' + if (requestOrigin && allowed.includes(requestOrigin)) return requestOrigin + return allowed[0] ?? '*' +} + +function withCors(req: Request, env: Env, headers?: Headers): Headers { + const result = headers ?? 
new Headers() + const origin = resolveCorsOrigin(req, env) + result.set('Access-Control-Allow-Origin', origin) + result.set('Access-Control-Allow-Headers', 'authorization, content-type, x-admin-user') + result.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') + result.set('Vary', 'Origin') + return result +} + +function json(req: Request, env: Env, payload: unknown, status = 200): Response { + const headers = withCors(req, env) + headers.set('Content-Type', JSON_CONTENT_TYPE) + return new Response(JSON.stringify(payload), { status, headers }) +} + +function html(req: Request, env: Env, body: string, status = 200): Response { + const headers = withCors(req, env) + headers.set('Content-Type', 'text/html; charset=utf-8') + headers.set('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + headers.set('Pragma', 'no-cache') + headers.set('Expires', '0') + return new Response(body, { status, headers }) +} + +function unauthorized(req: Request, env: Env): Response { + return json(req, env, { error: 'UNAUTHORIZED' }, 401) +} + +function hasPasswordLoginConfigured(env: Env): boolean { + return Boolean((env.ADMIN_USERNAME ?? '').trim() && (env.ADMIN_PASSWORD ?? '').trim()) +} + +function getSessionSecret(env: Env): string { + return (env.SESSION_SECRET ?? env.ADMIN_TOKEN ?? 
'').trim() +} + +function normalizeBase64Url(input: string): string { + return input.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/g, '') +} + +function toBase64UrlFromBytes(bytes: Uint8Array): string { + let binary = '' + bytes.forEach((b) => { binary += String.fromCharCode(b) }) + return normalizeBase64Url(btoa(binary)) +} + +function toBase64UrlFromText(value: string): string { + return toBase64UrlFromBytes(textEncoder.encode(value)) +} + +function fromBase64UrlToText(value: string): string | null { + try { + const base64 = value.replace(/-/g, '+').replace(/_/g, '/') + const padded = base64 + '='.repeat((4 - (base64.length % 4 || 4)) % 4) + const raw = atob(padded) + const bytes = new Uint8Array(raw.length) + for (let i = 0; i < raw.length; i += 1) bytes[i] = raw.charCodeAt(i) + return new TextDecoder().decode(bytes) + } catch { + return null + } +} + +async function signHmacSha256(secret: string, data: string): Promise { + const key = await crypto.subtle.importKey( + 'raw', + textEncoder.encode(secret), + { name: 'HMAC', hash: 'SHA-256' }, + false, + ['sign'], + ) + const signature = await crypto.subtle.sign('HMAC', key, textEncoder.encode(data)) + return toBase64UrlFromBytes(new Uint8Array(signature)) +} + +function timingSafeEqual(a: string, b: string): boolean { + if (a.length !== b.length) return false + let diff = 0 + for (let i = 0; i < a.length; i += 1) { + diff |= a.charCodeAt(i) ^ b.charCodeAt(i) + } + return diff === 0 +} + +function parseCookies(req: Request): Record { + const raw = req.headers.get('Cookie') ?? 
'' + const pairs = raw.split(';').map((v) => v.trim()).filter(Boolean) + const result: Record = {} + pairs.forEach((entry) => { + const idx = entry.indexOf('=') + if (idx <= 0) return + const key = entry.slice(0, idx).trim() + const val = entry.slice(idx + 1).trim() + result[key] = decodeURIComponent(val) + }) + return result +} + +interface SessionPayload { + u: string + iat: number + exp: number +} + +async function createSessionToken(env: Env, username: string): Promise { + const secret = getSessionSecret(env) + if (!secret) return null + const now = Math.floor(Date.now() / 1000) + const payload: SessionPayload = { + u: username, + iat: now, + exp: now + SESSION_TTL_SECONDS, + } + const payloadB64 = toBase64UrlFromText(JSON.stringify(payload)) + const signature = await signHmacSha256(secret, payloadB64) + return `${payloadB64}.${signature}` +} + +async function verifySessionToken(env: Env, token: string): Promise { + const secret = getSessionSecret(env) + if (!secret) return null + const [payloadB64, signature] = token.split('.') + if (!payloadB64 || !signature) return null + const expected = await signHmacSha256(secret, payloadB64) + if (!timingSafeEqual(expected, signature)) return null + const decoded = fromBase64UrlToText(payloadB64) + if (!decoded) return null + try { + const payload = JSON.parse(decoded) as Partial + const username = (payload.u ?? '').trim() + const exp = Number(payload.exp ?? 0) + const iat = Number(payload.iat ?? 
0) + if (!username || Number.isNaN(exp) || Number.isNaN(iat)) return null + if (Math.floor(Date.now() / 1000) >= exp) return null + return { u: username, exp, iat } + } catch { + return null + } +} + +async function getSessionFromRequest(req: Request, env: Env): Promise { + const token = parseCookies(req)[SESSION_COOKIE_NAME] + if (!token) return null + return verifySessionToken(env, token) +} + +function buildSessionCookie(req: Request, token: string): string { + const isSecure = new URL(req.url).protocol === 'https:' + const parts = [ + `${SESSION_COOKIE_NAME}=${encodeURIComponent(token)}`, + 'Path=/', + `Max-Age=${SESSION_TTL_SECONDS}`, + 'HttpOnly', + 'SameSite=Strict', + ] + if (isSecure) parts.push('Secure') + return parts.join('; ') +} + +function buildClearSessionCookie(req: Request): string { + const isSecure = new URL(req.url).protocol === 'https:' + const parts = [ + `${SESSION_COOKIE_NAME}=`, + 'Path=/', + 'Max-Age=0', + 'HttpOnly', + 'SameSite=Strict', + ] + if (isSecure) parts.push('Secure') + return parts.join('; ') +} + +async function isAdminAuthorized(req: Request, env: Env): Promise { + const session = await getSessionFromRequest(req, env) + if (hasPasswordLoginConfigured(env)) { + return Boolean(session && session.u === (env.ADMIN_USERNAME ?? '').trim()) + } + + const expected = (env.ADMIN_TOKEN ?? '').trim() + const auth = req.headers.get('Authorization') ?? 
'' + const token = auth.replace(/^Bearer\s+/i, '').trim() + if (expected && token.length > 0 && token === expected) return true + + return Boolean(session) +} + +async function sha256Hex(value: string): Promise { + const bytes = new TextEncoder().encode(value) + const digest = await crypto.subtle.digest('SHA-256', bytes) + return Array.from(new Uint8Array(digest)) + .map((b) => b.toString(16).padStart(2, '0')) + .join('') +} + +async function machineHashFromInput(machineIdInput: string): Promise { + if (SHA256_RE.test(machineIdInput)) { + return machineIdInput.toLowerCase() + } + return sha256Hex(machineIdInput) +} + +async function parseJsonBody(req: Request): Promise { + try { + return (await req.json()) as T + } catch { + return null + } +} + +async function upsertDevice( + env: Env, + machineHash: string, + hint: string, + machineId?: string, + appVersion?: string, + platform?: string, +): Promise { + const ts = nowIso() + await env.LICENSE_DB.prepare( + ` + INSERT INTO devices ( + machine_hash, machine_hint, machine_id, first_seen, last_seen, last_app_version, last_platform + ) VALUES (?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(machine_hash) DO UPDATE SET + machine_hint = excluded.machine_hint, + machine_id = excluded.machine_id, + last_seen = excluded.last_seen, + last_app_version = excluded.last_app_version, + last_platform = excluded.last_platform + `, + ) + .bind(machineHash, hint, machineId ?? null, ts, ts, appVersion ?? null, platform ?? null) + .run() +} + +async function getActiveLicense(env: Env, machineHash: string): Promise { + return env.LICENSE_DB.prepare( + ` + SELECT id, machine_hash, plan_code, status, activated_at, expires_at, note, revoked_at, revoked_reason + FROM licenses + WHERE machine_hash = ? 
AND status = 'ACTIVE' + ORDER BY id DESC + LIMIT 1 + `, + ) + .bind(machineHash) + .first() +} + +async function getLatestLicense(env: Env, machineHash: string): Promise { + return env.LICENSE_DB.prepare( + ` + SELECT id, machine_hash, plan_code, status, activated_at, expires_at, note, revoked_at, revoked_reason + FROM licenses + WHERE machine_hash = ? + ORDER BY id DESC + LIMIT 1 + `, + ) + .bind(machineHash) + .first() +} + +function isExpired(expiresAt: string | null, referenceIso: string): boolean { + if (!expiresAt) return false + return new Date(expiresAt).getTime() <= new Date(referenceIso).getTime() +} + +async function audit(env: Env, actor: string, action: string, machineHash?: string | null, detail?: unknown): Promise { + await env.LICENSE_DB.prepare( + ` + INSERT INTO audit_logs (actor, action, machine_hash, detail_json, created_at) + VALUES (?, ?, ?, ?, ?) + `, + ) + .bind(actor, action, machineHash ?? null, detail ? JSON.stringify(detail) : null, nowIso()) + .run() +} + +async function handleDeviceCheck(req: Request, env: Env): Promise { + const body = await parseJsonBody<{ + machine_id?: string + app_version?: string + platform?: string + }>(req) + const machineId = normalizeMachineId(body?.machine_id) + if (!machineId || machineId.length < 8) { + return json(req, env, { error: 'INVALID_MACHINE_ID' }, 400) + } + + const hash = await machineHashFromInput(machineId) + await upsertDevice(env, hash, machineHint(machineId), machineId, body?.app_version, body?.platform) + + const currentTime = nowIso() + let active = await getActiveLicense(env, hash) + + if (active && isExpired(active.expires_at, currentTime)) { + await env.LICENSE_DB.prepare( + ` + UPDATE licenses + SET status = 'EXPIRED' + WHERE id = ? 
AND status = 'ACTIVE' + `, + ).bind(active.id).run() + await audit(env, 'system', 'license_expired', hash, { license_id: active.id }) + active = null + } + + if (active) { + return json(req, env, { + allowed: true, + status: 'ACTIVE', + plan_code: active.plan_code, + plan_label: PLAN_LABEL[active.plan_code], + activated_at: active.activated_at, + expires_at: active.expires_at, + revoked_reason: null, + machine_hash: hash, + server_time: currentTime, + }) + } + + const latest = await getLatestLicense(env, hash) + const fallbackStatus = latest?.status ?? 'PENDING' + const message = fallbackStatus === 'REVOKED' + ? latest?.revoked_reason + ? `License đã bị thu hồi: ${latest.revoked_reason}` + : 'License đã bị thu hồi. Liên hệ quản trị viên.' + : fallbackStatus === 'EXPIRED' + ? 'License đã hết hạn. Vui lòng gia hạn.' + : 'Thiết bị chưa được active trong CMS.' + + return json(req, env, { + allowed: false, + status: fallbackStatus, + plan_code: latest?.plan_code ?? null, + plan_label: latest?.plan_code ? PLAN_LABEL[latest.plan_code] : null, + activated_at: latest?.activated_at ?? null, + expires_at: latest?.expires_at ?? null, + revoked_reason: latest?.revoked_reason ?? null, + machine_hash: hash, + server_time: currentTime, + message, + }) +} + +async function handleAdminListDevices(req: Request, env: Env): Promise { + const url = new URL(req.url) + const limit = Math.max(1, Math.min(500, Number.parseInt(url.searchParams.get('limit') ?? '200', 10) || 200)) + const { results } = await env.LICENSE_DB.prepare( + ` + SELECT + d.machine_hash, + d.machine_hint, + d.machine_id, + d.first_seen, + d.last_seen, + d.last_app_version, + d.last_platform, + l.plan_code, + l.status AS license_status, + l.activated_at, + l.expires_at, + l.revoked_at, + l.revoked_reason + FROM devices d + LEFT JOIN licenses l ON l.id = ( + SELECT id FROM licenses + WHERE machine_hash = d.machine_hash + ORDER BY id DESC + LIMIT 1 + ) + ORDER BY d.last_seen DESC + LIMIT ? 
+ `, + ) + .bind(limit) + .all() + + return json(req, env, { + items: results ?? [], + total: results?.length ?? 0, + }) +} + +async function handleAdminListLicenses(req: Request, env: Env): Promise { + const url = new URL(req.url) + const limit = Math.max(1, Math.min(1000, Number.parseInt(url.searchParams.get('limit') ?? '300', 10) || 300)) + const { results } = await env.LICENSE_DB.prepare( + ` + SELECT + id, + machine_hash, + plan_code, + status, + activated_at, + expires_at, + created_by, + note, + revoked_at, + revoked_reason + FROM licenses + ORDER BY id DESC + LIMIT ? + `, + ) + .bind(limit) + .all() + + return json(req, env, { + items: results ?? [], + total: results?.length ?? 0, + }) +} + +async function handleAdminActivate(req: Request, env: Env): Promise { + const body = await parseJsonBody<{ + machine_id?: string + plan?: string + note?: string + actor?: string + }>(req) + const machineId = normalizeMachineId(body?.machine_id) + if (!machineId) return json(req, env, { error: 'MACHINE_ID_REQUIRED' }, 400) + + const plan = normalizePlan(body?.plan) + if (!plan) return json(req, env, { error: 'INVALID_PLAN' }, 400) + + const hash = await machineHashFromInput(machineId) + const hint = SHA256_RE.test(machineId) ? `${machineId.slice(0, 8)}...` : machineHint(machineId) + const actor = (body?.actor?.trim() || req.headers.get('x-admin-user') || 'admin').slice(0, 120) + const activatedAt = nowIso() + const expiresAt = planExpiresAt(plan, activatedAt) + + await upsertDevice(env, hash, hint, machineId) + await env.LICENSE_DB.prepare( + ` + UPDATE licenses + SET status = 'REVOKED', revoked_at = ?, revoked_reason = 'Replaced by new activation' + WHERE machine_hash = ? AND status = 'ACTIVE' + `, + ) + .bind(activatedAt, hash) + .run() + + const inserted = await env.LICENSE_DB.prepare( + ` + INSERT INTO licenses ( + machine_hash, plan_code, status, activated_at, expires_at, created_by, note + ) VALUES (?, ?, 'ACTIVE', ?, ?, ?, ?) 
+ RETURNING id, machine_hash, plan_code, status, activated_at, expires_at, created_by, note + `, + ) + .bind(hash, plan, activatedAt, expiresAt, actor, body?.note?.trim() || null) + .first>() + + await audit(env, actor, 'license_activate', hash, { + plan_code: plan, + expires_at: expiresAt, + note: body?.note ?? null, + }) + + return json(req, env, { + ok: true, + item: inserted, + }) +} + +async function handleAdminRevoke(req: Request, env: Env): Promise { + const body = await parseJsonBody<{ + machine_id?: string + reason?: string + actor?: string + }>(req) + const machineId = normalizeMachineId(body?.machine_id) + if (!machineId) return json(req, env, { error: 'MACHINE_ID_REQUIRED' }, 400) + + const hash = await machineHashFromInput(machineId) + const actor = (body?.actor?.trim() || req.headers.get('x-admin-user') || 'admin').slice(0, 120) + const reason = body?.reason?.trim() || 'Revoked by admin' + const revokedAt = nowIso() + + const result = await env.LICENSE_DB.prepare( + ` + UPDATE licenses + SET status = 'REVOKED', revoked_at = ?, revoked_reason = ? + WHERE machine_hash = ? AND status = 'ACTIVE' + `, + ) + .bind(revokedAt, reason, hash) + .run() + + await audit(env, actor, 'license_revoke', hash, { + reason, + changed: result.meta.changes, + }) + + return json(req, env, { + ok: true, + changed: result.meta.changes, + }) +} + +async function handleAdminSession(req: Request, env: Env): Promise { + const session = await getSessionFromRequest(req, env) + if (!session) { + return json(req, env, { authenticated: false }) + } + return json(req, env, { + authenticated: true, + username: session.u, + }) +} + +async function handleAdminLogin(req: Request, env: Env): Promise { + if (!hasPasswordLoginConfigured(env)) { + return json(req, env, { error: 'ADMIN_LOGIN_NOT_CONFIGURED' }, 503) + } + + const body = await parseJsonBody<{ username?: string; password?: string }>(req) + const username = (body?.username ?? '').trim() + const password = (body?.password ?? 
'').trim() + const expectedUsername = (env.ADMIN_USERNAME ?? '').trim() + const expectedPassword = (env.ADMIN_PASSWORD ?? '').trim() + + if (!username || !password) { + return json(req, env, { error: 'USERNAME_PASSWORD_REQUIRED' }, 400) + } + if (username !== expectedUsername || password !== expectedPassword) { + return json(req, env, { error: 'INVALID_CREDENTIALS' }, 401) + } + + const token = await createSessionToken(env, username) + if (!token) { + return json(req, env, { error: 'SESSION_SECRET_NOT_CONFIGURED' }, 503) + } + + const headers = withCors(req, env) + headers.set('Content-Type', JSON_CONTENT_TYPE) + headers.append('Set-Cookie', buildSessionCookie(req, token)) + return new Response(JSON.stringify({ ok: true, username }), { status: 200, headers }) +} + +function handleAdminLogout(req: Request, env: Env): Response { + const headers = withCors(req, env) + headers.set('Content-Type', JSON_CONTENT_TYPE) + headers.append('Set-Cookie', buildClearSessionCookie(req)) + return new Response(JSON.stringify({ ok: true }), { status: 200, headers }) +} + +function renderAdminHtml(): string { + return ` + + + + + FlowKit License CMS + + + + + + + + + +` +} + +export default { + async fetch(req: Request, env: Env): Promise { + const url = new URL(req.url) + if (req.method === 'OPTIONS') { + return new Response(null, { status: 204, headers: withCors(req, env) }) + } + + if (url.pathname === '/' || url.pathname === '/admin') { + return html(req, env, renderAdminHtml()) + } + + if (url.pathname === '/v1/health' && req.method === 'GET') { + return json(req, env, { + ok: true, + service: 'flowkit-license', + time: nowIso(), + }) + } + + if (url.pathname === '/v1/device/check' && req.method === 'POST') { + return handleDeviceCheck(req, env) + } + + if (!url.pathname.startsWith('/v1/admin/')) { + return json(req, env, { error: 'NOT_FOUND' }, 404) + } + + if (url.pathname === '/v1/admin/login' && req.method === 'POST') { + return handleAdminLogin(req, env) + } + if 
(url.pathname === '/v1/admin/logout' && req.method === 'POST') { + return handleAdminLogout(req, env) + } + if (url.pathname === '/v1/admin/session' && req.method === 'GET') { + return handleAdminSession(req, env) + } + + if (!(await isAdminAuthorized(req, env))) { + return unauthorized(req, env) + } + + if (url.pathname === '/v1/admin/devices' && req.method === 'GET') { + return handleAdminListDevices(req, env) + } + if (url.pathname === '/v1/admin/licenses' && req.method === 'GET') { + return handleAdminListLicenses(req, env) + } + if (url.pathname === '/v1/admin/licenses/activate' && req.method === 'POST') { + return handleAdminActivate(req, env) + } + if (url.pathname === '/v1/admin/licenses/revoke' && req.method === 'POST') { + return handleAdminRevoke(req, env) + } + + return json(req, env, { error: 'NOT_FOUND' }, 404) + }, +} diff --git a/cloudflare-license/tsconfig.json b/cloudflare-license/tsconfig.json new file mode 100644 index 0000000..604a276 --- /dev/null +++ b/cloudflare-license/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "lib": ["ES2022"], + "types": ["@cloudflare/workers-types"], + "strict": true, + "skipLibCheck": true, + "noEmit": true + }, + "include": ["src/**/*"] +} diff --git a/cloudflare-license/wrangler.toml b/cloudflare-license/wrangler.toml new file mode 100644 index 0000000..164da27 --- /dev/null +++ b/cloudflare-license/wrangler.toml @@ -0,0 +1,12 @@ +name = "flowkit-license" +main = "src/index.ts" +compatibility_date = "2026-04-21" +account_id = "cf907123cee7dcfd4737e29c64a9b356" + +[[d1_databases]] +binding = "LICENSE_DB" +database_name = "flowkit_license" +database_id = "70c14ac4-7a18-44ee-b9d1-bdead2f3090b" + +[vars] +CORS_ORIGIN = "*" diff --git a/desktop/electron-builder.yml b/desktop/electron-builder.yml new file mode 100644 index 0000000..6eb8520 --- /dev/null +++ b/desktop/electron-builder.yml @@ -0,0 +1,42 @@ +appId: com.flowkit.app 
+productName: FlowKit +copyright: Copyright © 2025 FlowKit + +directories: + buildResources: resources + output: dist + +files: + - out/** + +extraResources: + - from: ../extension + to: extension + filter: + - "**/*" + - "!_metadata" + - from: resources/agent + to: agent + filter: + - "**/*" + +mac: + category: public.app-category.productivity + target: + - target: dmg + arch: [arm64, x64] + icon: resources/icon.icns + +win: + target: + - target: nsis + arch: [x64] + icon: resources/icon.ico + +nsis: + oneClick: false + allowToChangeInstallationDirectory: true + installerIcon: resources/icon.ico + uninstallerIcon: resources/icon.ico + createDesktopShortcut: true + createStartMenuShortcut: true diff --git a/desktop/electron.vite.config.ts b/desktop/electron.vite.config.ts new file mode 100644 index 0000000..7195ce5 --- /dev/null +++ b/desktop/electron.vite.config.ts @@ -0,0 +1,33 @@ +import { defineConfig, externalizeDepsPlugin } from 'electron-vite' +import react from '@vitejs/plugin-react' +import tailwindcss from '@tailwindcss/vite' +import { resolve } from 'path' + +export default defineConfig({ + main: { + plugins: [externalizeDepsPlugin()], + build: { + lib: { + entry: resolve(__dirname, 'electron/main.ts'), + }, + }, + }, + preload: { + plugins: [externalizeDepsPlugin()], + build: { + lib: { + entry: resolve(__dirname, 'electron/preload.ts'), + }, + }, + }, + renderer: { + root: resolve(__dirname, 'src'), + plugins: [react(), tailwindcss()], + server: { port: 5173 }, + build: { + rollupOptions: { + input: resolve(__dirname, 'src/index.html'), + }, + }, + }, +}) diff --git a/desktop/electron/license.ts b/desktop/electron/license.ts new file mode 100644 index 0000000..6122992 --- /dev/null +++ b/desktop/electron/license.ts @@ -0,0 +1,321 @@ +import { execFile } from 'child_process' +import { createHash } from 'crypto' +import { mkdir, readFile, writeFile } from 'fs/promises' +import os from 'os' +import { dirname } from 'path' +import { promisify } from 'util' 
+ +const execFileAsync = promisify(execFile) +const SHA256_RE = /^[a-f0-9]{64}$/i +const MACHINE_ID_PREFIX = 'FKM' +const DEFAULT_TIMEOUT_MS = 7000 + +export const DEFAULT_LICENSE_API_BASE = 'https://flowkit-license.sitegrows.workers.dev' + +export type LicenseStatus = 'ACTIVE' | 'EXPIRED' | 'REVOKED' | 'PENDING' | 'ERROR' + +export interface LicenseConfig { + apiBaseUrl: string +} + +export interface LicenseCheckResult { + allowed: boolean + status: LicenseStatus + machineId: string + machineHash: string | null + planCode: string | null + planLabel: string | null + activatedAt: string | null + expiresAt: string | null + revokedReason: string | null + checkedAt: string + serverTime: string | null + source: 'remote' | 'cache' + apiBaseUrl: string + message: string +} + +interface CachedLicenseRecord extends LicenseCheckResult { + cacheSavedAt: string +} + +let machineIdCache: string | null = null +let machineIdPromise: Promise | null = null + +function normalizeApiBaseUrl(raw: string): string { + const value = (raw || '').trim() + if (!value) return DEFAULT_LICENSE_API_BASE + const normalized = value.replace(/\/+$/, '') + if (/^https?:\/\//i.test(normalized)) return normalized + return `https://${normalized}` +} + +function nowIso(): string { + return new Date().toISOString() +} + +function sha256(input: string): string { + return createHash('sha256').update(input).digest('hex') +} + +function formatMachineId(hash: string): string { + return `${MACHINE_ID_PREFIX}-${hash.slice(0, 8)}-${hash.slice(8, 16)}-${hash.slice(16, 24)}-${hash.slice(24, 32)}`.toUpperCase() +} + +async function readText(path: string): Promise { + try { + const data = await readFile(path, 'utf-8') + const value = data.trim() + return value.length > 0 ? 
value : null + } catch { + return null + } +} + +async function readCommand(command: string, args: string[]): Promise { + try { + const { stdout } = await execFileAsync(command, args, { + timeout: 1500, + maxBuffer: 256 * 1024, + windowsHide: true, + }) + const output = stdout.trim() + return output.length > 0 ? output : null + } catch { + return null + } +} + +async function getDarwinSeed(): Promise { + const output = await readCommand('ioreg', ['-rd1', '-c', 'IOPlatformExpertDevice']) + if (!output) return null + const match = output.match(/"IOPlatformUUID"\s*=\s*"([^"]+)"/) + return match?.[1]?.trim() ?? null +} + +async function getWindowsSeed(): Promise { + const output = await readCommand('powershell.exe', ['-NoProfile', '-Command', '(Get-CimInstance Win32_ComputerSystemProduct).UUID']) + if (!output) return null + const value = output.split(/\r?\n/).map((line) => line.trim()).find((line) => !!line && line.toLowerCase() !== 'uuid') + return value ?? null +} + +async function getLinuxSeed(): Promise { + const fromMachineId = await readText('/etc/machine-id') + if (fromMachineId) return fromMachineId + return readText('/var/lib/dbus/machine-id') +} + +function fallbackSeed(): string { + const interfaces = os.networkInterfaces() + const macs = Object.values(interfaces) + .flatMap((items) => items ?? 
[]) + .filter((item) => !item.internal && item.mac && item.mac !== '00:00:00:00:00:00') + .map((item) => item.mac.toLowerCase()) + .sort() + + return [ + os.platform(), + os.arch(), + os.hostname(), + os.release(), + macs.join('|'), + ].join('::') +} + +async function resolveMachineSeed(): Promise { + if (process.platform === 'darwin') { + const value = await getDarwinSeed() + if (value) return value + } + + if (process.platform === 'win32') { + const value = await getWindowsSeed() + if (value) return value + } + + if (process.platform === 'linux') { + const value = await getLinuxSeed() + if (value) return value + } + + return fallbackSeed() +} + +export async function getMachineId(): Promise { + if (machineIdCache) return machineIdCache + if (machineIdPromise) return machineIdPromise + + machineIdPromise = (async () => { + const seed = await resolveMachineSeed() + const hash = sha256(seed) + const machineId = formatMachineId(hash) + machineIdCache = machineId + return machineId + })().finally(() => { + machineIdPromise = null + }) + + return machineIdPromise +} + +async function readJsonFile(path: string): Promise { + try { + const raw = await readFile(path, 'utf-8') + return JSON.parse(raw) as T + } catch { + return null + } +} + +async function writeJsonFile(path: string, data: unknown): Promise { + const folder = dirname(path) + await mkdir(folder, { recursive: true }) + await writeFile(path, JSON.stringify(data, null, 2), 'utf-8') +} + +export async function loadLicenseConfig(configPath: string, defaultApiBase: string): Promise { + const file = await readJsonFile>(configPath) + return { + apiBaseUrl: normalizeApiBaseUrl(file?.apiBaseUrl || defaultApiBase), + } +} + +export async function saveLicenseConfig(configPath: string, apiBaseUrl: string, defaultApiBase: string): Promise { + const config: LicenseConfig = { apiBaseUrl: normalizeApiBaseUrl(apiBaseUrl || defaultApiBase) } + await writeJsonFile(configPath, config) + return config +} + +function 
parseStatus(raw: unknown, allowed: boolean): LicenseStatus { + const upper = typeof raw === 'string' ? raw.trim().toUpperCase() : '' + if (upper === 'ACTIVE' || upper === 'EXPIRED' || upper === 'REVOKED' || upper === 'PENDING' || upper === 'ERROR') { + return upper + } + return allowed ? 'ACTIVE' : 'PENDING' +} + +function isCacheStillValid(cache: CachedLicenseRecord | null): boolean { + if (!cache || !cache.allowed || cache.status !== 'ACTIVE') return false + if (!cache.expiresAt) return true + const expires = new Date(cache.expiresAt).getTime() + if (Number.isNaN(expires)) return false + return expires > Date.now() +} + +async function readCache(cachePath: string): Promise { + const cache = await readJsonFile(cachePath) + if (!cache) return null + if (cache.status !== 'ACTIVE') return null + return cache +} + +async function saveCache(cachePath: string, result: LicenseCheckResult): Promise { + if (!result.allowed || result.status !== 'ACTIVE') return + const payload: CachedLicenseRecord = { + ...result, + cacheSavedAt: nowIso(), + } + await writeJsonFile(cachePath, payload) +} + +function normalizeMachineHash(raw: unknown): string | null { + if (typeof raw !== 'string') return null + const value = raw.trim().toLowerCase() + return SHA256_RE.test(value) ? 
value : null +} + +export async function checkLicense(options: { + apiBaseUrl: string + machineId: string + cachePath: string + appVersion: string + platform: string +}): Promise { + const apiBaseUrl = normalizeApiBaseUrl(options.apiBaseUrl) + const checkedAt = nowIso() + const body = { + machine_id: options.machineId, + app_version: options.appVersion, + platform: options.platform, + } + + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), DEFAULT_TIMEOUT_MS) + let response: Response + try { + response = await fetch(`${apiBaseUrl}/v1/device/check`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + signal: controller.signal, + }) + } finally { + clearTimeout(timeout) + } + + const data = await response.json() as Record + if (!response.ok) { + throw new Error(typeof data?.error === 'string' ? data.error : `HTTP_${response.status}`) + } + + const allowed = Boolean(data.allowed) + const status = parseStatus(data.status, allowed) + const result: LicenseCheckResult = { + allowed, + status, + machineId: options.machineId, + machineHash: normalizeMachineHash(data.machine_hash), + planCode: typeof data.plan_code === 'string' ? data.plan_code : null, + planLabel: typeof data.plan_label === 'string' ? data.plan_label : null, + activatedAt: typeof data.activated_at === 'string' ? data.activated_at : null, + expiresAt: typeof data.expires_at === 'string' ? data.expires_at : null, + revokedReason: typeof data.revoked_reason === 'string' ? data.revoked_reason : null, + checkedAt, + serverTime: typeof data.server_time === 'string' ? data.server_time : null, + source: 'remote', + apiBaseUrl, + message: typeof data.message === 'string' + ? data.message + : allowed + ? 'License đang hoạt động.' 
+ : 'Thiết bị chưa được kích hoạt.', + } + + if (result.allowed && result.status === 'ACTIVE') { + await saveCache(options.cachePath, result) + } + + return result + } catch (error) { + const cached = await readCache(options.cachePath) + if (isCacheStillValid(cached)) { + return { + ...cached, + revokedReason: cached.revokedReason ?? null, + checkedAt, + source: 'cache', + apiBaseUrl, + message: 'Không kết nối được license server. Đang dùng giấy phép đã cache cục bộ.', + } + } + + return { + allowed: false, + status: 'ERROR', + machineId: options.machineId, + machineHash: null, + planCode: null, + planLabel: null, + activatedAt: null, + expiresAt: null, + revokedReason: null, + checkedAt, + serverTime: null, + source: 'remote', + apiBaseUrl, + message: error instanceof Error ? error.message : 'LICENSE_SERVER_UNREACHABLE', + } + } +} diff --git a/desktop/electron/main.ts b/desktop/electron/main.ts new file mode 100644 index 0000000..3ed2feb --- /dev/null +++ b/desktop/electron/main.ts @@ -0,0 +1,1353 @@ +import { app, BrowserWindow, dialog, ipcMain, Tray, Menu, nativeImage, shell, session, screen, safeStorage } from 'electron' +import { dirname, join } from 'path' +import { mkdirSync, readFileSync, writeFileSync, existsSync } from 'fs' +import { sidecar } from './sidecar' +import { checkLicense, DEFAULT_LICENSE_API_BASE, getMachineId, loadLicenseConfig, saveLicenseConfig, type LicenseCheckResult } from './license' + +let mainWindow: BrowserWindow | null = null +let flowWindow: BrowserWindow | null = null +let flowSidebarWindow: BrowserWindow | null = null +let tray: Tray | null = null +let flowExtensionId: string | null = null +let flowSessionPartition = '' +let appQuitting = false +let licenseEnforceTimer: ReturnType | null = null +let licenseEnforceInFlight = false +let licenseRevokedLockdown = false +let licenseRevokedNotified = false + +// Ensure unique app identity in dev mode to avoid collisions with generic Electron apps. 
+app.setName('FlowKit') +app.setPath('userData', join(app.getPath('appData'), 'FlowKit')) + +const gotSingleInstanceLock = app.requestSingleInstanceLock() +if (!gotSingleInstanceLock) { + app.quit() + process.exit(0) +} + +const ICON_PATH = join(__dirname, '../../resources/icon.png') +const EXTENSION_PATH = join(process.resourcesPath ?? join(__dirname, '../../..'), 'extension') +const FLOW_URL = 'https://labs.google/fx/tools/flow' +const FLOW_SIDEBAR_RATIO = 0.28 +const FLOW_SIDEBAR_MIN = 360 +const FLOW_SIDEBAR_MAX = 480 +const FLOW_SIDEBAR_GAP = 8 +const FLOW_SIDEBAR_MIN_HEIGHT = 560 +const FLOW_ACCOUNT_DEFAULT_ID = 'default' +const FLOW_ACCOUNT_DEFAULT_LABEL = 'Tài khoản mặc định' +const FLOW_ACCOUNT_PARTITION_PREFIX = 'persist:flowkit-flow-' +const FLOW_ACCOUNTS_CONFIG_PATH = join(app.getPath('userData'), 'flow-accounts.json') +const FLOW_UI_CONFIG_PATH = join(app.getPath('userData'), 'flow-ui.json') +const LICENSE_CONFIG_PATH = join(app.getPath('userData'), 'license-config.json') +const LICENSE_CACHE_PATH = join(app.getPath('userData'), 'license-cache.json') +const DEFAULT_LICENSE_API = process.env.FLOWKIT_LICENSE_API_BASE ?? DEFAULT_LICENSE_API_BASE +const LICENSE_REVOKE_POLL_MS = 5000 + +let lastLicenseCheck: LicenseCheckResult | null = null +const refererPatchedPartitions = new Set() +const extensionIdByPartition = new Map() + +type FlowAccount = { + id: string + label: string + email: string + passwordEnc: string + partition: string + createdAt: string + updatedAt: string +} + +type FlowAccountsConfig = { + activeAccountId: string + accounts: FlowAccount[] +} + +type FlowUIConfig = { + sidebarVisible: boolean +} + +function nowIso(): string { + return new Date().toISOString() +} + +function normalizeFlowAccountId(raw: unknown): string { + const cleaned = String(raw ?? 
'') + .trim() + .toLowerCase() + .replace(/[^a-z0-9_-]+/g, '-') + .replace(/-+/g, '-') + .replace(/^[-_]+|[-_]+$/g, '') + if (!cleaned) return '' + if (!/^[a-z0-9]/.test(cleaned)) return '' + return cleaned.slice(0, 40) +} + +function deriveFlowAccountId(label: string): string { + const base = normalizeFlowAccountId(label) || 'account' + return base +} + +function partitionForAccountId(accountId: string): string { + return `${FLOW_ACCOUNT_PARTITION_PREFIX}${accountId}` +} + +function encodeSecret(secret: string): string { + const plain = String(secret ?? '') + if (!plain) return '' + try { + if (safeStorage.isEncryptionAvailable()) { + const encrypted = safeStorage.encryptString(plain) + return `safe:${encrypted.toString('base64')}` + } + } catch (err) { + console.warn('[main] safeStorage encrypt failed, fallback plain:', err) + } + return `plain:${Buffer.from(plain, 'utf-8').toString('base64')}` +} + +function decodeSecret(cipher: string | null | undefined): string { + const raw = String(cipher ?? '').trim() + if (!raw) return '' + if (raw.startsWith('safe:')) { + const b64 = raw.slice('safe:'.length).trim() + if (!b64) return '' + try { + const buf = Buffer.from(b64, 'base64') + if (safeStorage.isEncryptionAvailable()) { + return safeStorage.decryptString(buf) + } + } catch (err) { + console.warn('[main] safeStorage decrypt failed:', err) + return '' + } + return '' + } + if (raw.startsWith('plain:')) { + const b64 = raw.slice('plain:'.length).trim() + if (!b64) return '' + try { + return Buffer.from(b64, 'base64').toString('utf-8') + } catch { + return '' + } + } + // Backward compatibility for old plain values. + return raw +} + +function normalizeFlowAccount(raw: any, index = 0): FlowAccount | null { + const id = normalizeFlowAccountId(raw?.id) || (index === 0 ? FLOW_ACCOUNT_DEFAULT_ID : '') + if (!id) return null + const labelRaw = String(raw?.label ?? '').trim() + const label = labelRaw || (id === FLOW_ACCOUNT_DEFAULT_ID ? 
FLOW_ACCOUNT_DEFAULT_LABEL : `Tài khoản ${index + 1}`) + const email = String(raw?.email ?? '').trim() + const partitionRaw = String(raw?.partition ?? '').trim() + const partition = partitionRaw.startsWith('persist:') + ? partitionRaw + : partitionForAccountId(id) + const passwordEnc = String(raw?.passwordEnc ?? '').trim() + const createdAt = String(raw?.createdAt ?? '').trim() || nowIso() + const updatedAt = String(raw?.updatedAt ?? '').trim() || createdAt + return { id, label, email, passwordEnc, partition, createdAt, updatedAt } +} + +function defaultFlowAccountsConfig(): FlowAccountsConfig { + const createdAt = nowIso() + return { + activeAccountId: FLOW_ACCOUNT_DEFAULT_ID, + accounts: [{ + id: FLOW_ACCOUNT_DEFAULT_ID, + label: FLOW_ACCOUNT_DEFAULT_LABEL, + email: '', + passwordEnc: '', + partition: partitionForAccountId(FLOW_ACCOUNT_DEFAULT_ID), + createdAt, + updatedAt: createdAt, + }], + } +} + +function defaultFlowUIConfig(): FlowUIConfig { + return { sidebarVisible: true } +} + +function normalizeFlowUIConfig(raw: any): FlowUIConfig { + return { + sidebarVisible: raw?.sidebarVisible !== false, + } +} + +function normalizeFlowAccountsConfig(raw: any): FlowAccountsConfig { + const listRaw = Array.isArray(raw?.accounts) ? raw.accounts : [] + const dedup = new Map() + listRaw.forEach((row: any, idx: number) => { + const normalized = normalizeFlowAccount(row, idx) + if (!normalized) return + dedup.set(normalized.id, normalized) + }) + if (!dedup.has(FLOW_ACCOUNT_DEFAULT_ID)) { + const fallback = defaultFlowAccountsConfig().accounts[0] + dedup.set(FLOW_ACCOUNT_DEFAULT_ID, fallback) + } + const accounts = Array.from(dedup.values()) + const activeCandidate = normalizeFlowAccountId(raw?.activeAccountId) + const activeAccountId = accounts.some((a) => a.id === activeCandidate) + ? 
activeCandidate + : accounts[0].id + return { activeAccountId, accounts } +} + +function loadFlowAccountsConfig(): FlowAccountsConfig { + try { + mkdirSync(dirname(FLOW_ACCOUNTS_CONFIG_PATH), { recursive: true }) + if (!existsSync(FLOW_ACCOUNTS_CONFIG_PATH)) { + const seeded = defaultFlowAccountsConfig() + writeFileSync(FLOW_ACCOUNTS_CONFIG_PATH, JSON.stringify(seeded, null, 2), 'utf-8') + return seeded + } + const raw = JSON.parse(readFileSync(FLOW_ACCOUNTS_CONFIG_PATH, 'utf-8')) + const normalized = normalizeFlowAccountsConfig(raw) + writeFileSync(FLOW_ACCOUNTS_CONFIG_PATH, JSON.stringify(normalized, null, 2), 'utf-8') + return normalized + } catch (err) { + console.error('[main] Failed to load flow accounts config, fallback default:', err) + const fallback = defaultFlowAccountsConfig() + try { + writeFileSync(FLOW_ACCOUNTS_CONFIG_PATH, JSON.stringify(fallback, null, 2), 'utf-8') + } catch { + // no-op + } + return fallback + } +} + +let flowAccountsConfig: FlowAccountsConfig = loadFlowAccountsConfig() + +function loadFlowUIConfig(): FlowUIConfig { + try { + mkdirSync(dirname(FLOW_UI_CONFIG_PATH), { recursive: true }) + if (!existsSync(FLOW_UI_CONFIG_PATH)) { + const seeded = defaultFlowUIConfig() + writeFileSync(FLOW_UI_CONFIG_PATH, JSON.stringify(seeded, null, 2), 'utf-8') + return seeded + } + const raw = JSON.parse(readFileSync(FLOW_UI_CONFIG_PATH, 'utf-8')) + const normalized = normalizeFlowUIConfig(raw) + writeFileSync(FLOW_UI_CONFIG_PATH, JSON.stringify(normalized, null, 2), 'utf-8') + return normalized + } catch (err) { + console.error('[main] Failed to load flow ui config, fallback default:', err) + const fallback = defaultFlowUIConfig() + try { + writeFileSync(FLOW_UI_CONFIG_PATH, JSON.stringify(fallback, null, 2), 'utf-8') + } catch { + // no-op + } + return fallback + } +} + +let flowUIConfig: FlowUIConfig = loadFlowUIConfig() +let flowSidebarVisible = flowUIConfig.sidebarVisible + +function saveFlowUIConfig(next: FlowUIConfig): FlowUIConfig { + const 
normalized = normalizeFlowUIConfig(next) + flowUIConfig = normalized + flowSidebarVisible = normalized.sidebarVisible + mkdirSync(dirname(FLOW_UI_CONFIG_PATH), { recursive: true }) + writeFileSync(FLOW_UI_CONFIG_PATH, JSON.stringify(normalized, null, 2), 'utf-8') + return normalized +} + +function saveFlowAccountsConfig(next: FlowAccountsConfig): FlowAccountsConfig { + const normalized = normalizeFlowAccountsConfig(next) + flowAccountsConfig = normalized + mkdirSync(dirname(FLOW_ACCOUNTS_CONFIG_PATH), { recursive: true }) + writeFileSync(FLOW_ACCOUNTS_CONFIG_PATH, JSON.stringify(normalized, null, 2), 'utf-8') + return normalized +} + +function getFlowAccountById(accountId?: string | null): FlowAccount { + const desired = normalizeFlowAccountId(accountId ?? '') || flowAccountsConfig.activeAccountId + return flowAccountsConfig.accounts.find((a) => a.id === desired) + ?? flowAccountsConfig.accounts[0] +} + +function isFlowSidebarActuallyVisible(): boolean { + return Boolean(flowSidebarWindow && !flowSidebarWindow.isDestroyed() && flowSidebarWindow.isVisible()) +} + +function getFlowPanelStatePayload() { + const sidebarAlive = Boolean(flowSidebarWindow && !flowSidebarWindow.isDestroyed()) + const sidebarShown = isFlowSidebarActuallyVisible() + return { + // Reflect actual on-screen state (not only requested config state). + visible: sidebarShown, + sidebarReady: sidebarAlive, + flowReady: Boolean(flowWindow && !flowWindow.isDestroyed()), + requestedVisible: flowSidebarVisible, + } +} + +function emitFlowPanelStateChanged() { + mainWindow?.webContents.send('flow-panel-state-changed', getFlowPanelStatePayload()) +} + +// In dev mode, use local extension path +const extensionPath = app.isPackaged + ? 
EXTENSION_PATH + : join(__dirname, '../../../extension') + +function patchRefererHeaderForSession(ses: Electron.Session, partitionKey: string) { + if (refererPatchedPartitions.has(partitionKey)) return + ses.webRequest.onBeforeSendHeaders( + { urls: ['https://aisandbox-pa.googleapis.com/*'] }, + (details, callback) => { + const headers = { ...details.requestHeaders } + headers['Referer'] = 'https://labs.google/' + callback({ requestHeaders: headers }) + } + ) + refererPatchedPartitions.add(partitionKey) +} + +function getExtensionsHost(ses: Electron.Session): any { + return (ses as any).extensions ?? ses +} + +async function ensureExtensionLoadedForPartition(ses: Electron.Session, partitionKey: string): Promise { + const cached = extensionIdByPartition.get(partitionKey) + if (cached) return cached + const extHost = getExtensionsHost(ses) + try { + const loaded = await extHost.loadExtension(extensionPath, { allowFileAccess: true }) + const id = (loaded as any)?.id ?? '' + if (!id) throw new Error('Missing extension id') + extensionIdByPartition.set(partitionKey, id) + return id + } catch (err) { + const all = typeof extHost.getAllExtensions === 'function' + ? await extHost.getAllExtensions() + : [] + const existing = Array.isArray(all) + ? all.find((ext: any) => { + const p = String(ext?.path ?? '') + const n = String(ext?.name ?? '').toLowerCase() + return n.includes('flow kit') || p.includes('/extension') || p.endsWith('\\extension') + }) + : null + const id = String(existing?.id ?? 
'') + if (id) { + extensionIdByPartition.set(partitionKey, id) + return id + } + throw err + } +} + +async function unloadExtensionForPartition(partitionKey: string) { + const id = extensionIdByPartition.get(partitionKey) + if (!id) return + try { + const ses = session.fromPartition(partitionKey) + const extHost = getExtensionsHost(ses) + if (typeof extHost.removeExtension === 'function') { + await extHost.removeExtension(id) + } + } catch (err) { + console.warn('[main] removeExtension failed:', { partitionKey, id, err }) + } finally { + extensionIdByPartition.delete(partitionKey) + } +} + +async function prepareFlowRuntimeForAccount(accountId?: string | null): Promise<{ account: FlowAccount; flowSession: Electron.Session; extensionId: string }> { + const account = getFlowAccountById(accountId) + const partitionKey = account.partition + const flowSession = session.fromPartition(partitionKey) + patchRefererHeaderForSession(flowSession, partitionKey) + const extensionId = await ensureExtensionLoadedForPartition(flowSession, partitionKey) + + if (flowSessionPartition && flowSessionPartition !== partitionKey) { + await unloadExtensionForPartition(flowSessionPartition) + } + + flowSessionPartition = partitionKey + flowExtensionId = extensionId + if (flowAccountsConfig.activeAccountId !== account.id) { + saveFlowAccountsConfig({ + ...flowAccountsConfig, + activeAccountId: account.id, + }) + } + + return { account, flowSession, extensionId } +} + +function calcFlowSidebarWidth(totalWidth: number): number { + const byRatio = Math.floor(totalWidth * FLOW_SIDEBAR_RATIO) + return Math.max(FLOW_SIDEBAR_MIN, Math.min(FLOW_SIDEBAR_MAX, byRatio)) +} + +function layoutFlowWindows() { + if (!flowWindow || flowWindow.isDestroyed()) return + if (!flowSidebarWindow || flowSidebarWindow.isDestroyed()) return + + if (!flowSidebarVisible) { + if (flowSidebarWindow.isVisible()) flowSidebarWindow.hide() + return + } + + const flowBounds = flowWindow.getBounds() + const display = 
screen.getDisplayMatching(flowBounds) + const workArea = display.workArea + const sidebarWidth = calcFlowSidebarWidth(flowBounds.width) + const sidebarHeight = Math.max(FLOW_SIDEBAR_MIN_HEIGHT, flowBounds.height) + const rightX = flowBounds.x + flowBounds.width + FLOW_SIDEBAR_GAP + const leftX = flowBounds.x - sidebarWidth - FLOW_SIDEBAR_GAP + const canFitRight = rightX + sidebarWidth <= workArea.x + workArea.width + const canFitLeft = leftX >= workArea.x + + // Prefer non-overlap. If no room beside Flow window, dock as floating window in work area. + if (!canFitRight && !canFitLeft) { + const x = Math.max(workArea.x, workArea.x + workArea.width - sidebarWidth) + const y = Math.max(workArea.y, Math.min(flowBounds.y, workArea.y + workArea.height - sidebarHeight)) + flowSidebarWindow.setBounds({ x, y, width: sidebarWidth, height: sidebarHeight }) + if (flowWindow.isVisible() && !flowSidebarWindow.isVisible()) { + if (typeof flowSidebarWindow.showInactive === 'function') flowSidebarWindow.showInactive() + else flowSidebarWindow.show() + } + return + } + const x = canFitRight ? 
rightX : leftX + + let y = flowBounds.y + if (y + sidebarHeight > workArea.y + workArea.height) { + y = Math.max(workArea.y, workArea.y + workArea.height - sidebarHeight) + } + + flowSidebarWindow.setBounds({ x, y, width: sidebarWidth, height: sidebarHeight }) + if (flowWindow.isVisible() && !flowSidebarWindow.isVisible()) { + if (typeof flowSidebarWindow.showInactive === 'function') flowSidebarWindow.showInactive() + else flowSidebarWindow.show() + } +} + +function createFlowSidebarWindow(showOnReady = false) { + if (!flowExtensionId) { + flowSidebarWindow = null + console.warn('[main] Flow extension ID unavailable — sidebar window disabled') + return null + } + if (flowSidebarWindow && !flowSidebarWindow.isDestroyed()) { + layoutFlowWindows() + if (showOnReady && flowSidebarVisible && !flowSidebarWindow.isVisible()) { + if (typeof flowSidebarWindow.showInactive === 'function') flowSidebarWindow.showInactive() + else flowSidebarWindow.show() + } + return flowSidebarWindow + } + + flowSidebarWindow = new BrowserWindow({ + width: FLOW_SIDEBAR_MIN, + height: 900, + minWidth: FLOW_SIDEBAR_MIN, + minHeight: FLOW_SIDEBAR_MIN_HEIGHT, + title: 'Flow Agent', + backgroundColor: '#0a0f1f', + autoHideMenuBar: true, + show: false, + webPreferences: { + contextIsolation: true, + nodeIntegration: false, + session: session.fromPartition(flowSessionPartition || getFlowAccountById().partition), + } + }) + flowSidebarWindow.setMenuBarVisibility(false) + flowSidebarWindow.webContents.setWindowOpenHandler(({ url }) => { + void shell.openExternal(url) + return { action: 'deny' } + }) + + const sidePanelUrl = `chrome-extension://${flowExtensionId}/side_panel.html` + flowSidebarWindow.webContents.loadURL(sidePanelUrl).catch((err) => { + console.error('[main] Failed to load extension sidebar window:', err) + }) + flowSidebarWindow.webContents.on('did-finish-load', () => { + console.log('[main] Flow sidebar window loaded:', flowSidebarWindow?.webContents.getURL()) + }) + 
flowSidebarWindow.webContents.on('did-fail-load', (_e, code, desc, url) => { + console.error('[main] Flow sidebar window failed:', { code, desc, url }) + }) + + flowSidebarWindow.on('close', (event) => { + if (appQuitting) return + event.preventDefault() + flowSidebarWindow?.hide() + emitFlowPanelStateChanged() + }) + + flowSidebarWindow.on('show', () => emitFlowPanelStateChanged()) + flowSidebarWindow.on('hide', () => emitFlowPanelStateChanged()) + + flowSidebarWindow.on('closed', () => { + flowSidebarWindow = null + emitFlowPanelStateChanged() + }) + + layoutFlowWindows() + if (showOnReady && flowSidebarVisible) { + if (typeof flowSidebarWindow.showInactive === 'function') flowSidebarWindow.showInactive() + else flowSidebarWindow.show() + } + return flowSidebarWindow +} + + +function createMainWindow() { + mainWindow = new BrowserWindow({ + width: 1280, + height: 800, + minWidth: 900, + minHeight: 600, + titleBarStyle: process.platform === 'darwin' ? 'hiddenInset' : 'default', + backgroundColor: '#eef3fb', + show: false, + webPreferences: { + preload: join(__dirname, '../preload/preload.js'), + contextIsolation: true, + nodeIntegration: false, + } + }) + + // Load renderer + if (process.env.ELECTRON_RENDERER_URL) { + mainWindow.loadURL(process.env.ELECTRON_RENDERER_URL) + } else { + // electron-vite outputs renderer assets to out/renderer in preview/production + mainWindow.loadFile(join(__dirname, '../renderer/index.html')) + } + + mainWindow.once('ready-to-show', () => { + mainWindow?.show() + mainWindow?.focus() + }) + + // Safety net: ensure dashboard is visible even if ready-to-show is delayed. 
+ setTimeout(() => { + if (mainWindow && !mainWindow.isDestroyed() && !mainWindow.isVisible()) { + console.warn('[main] Forcing main window show after startup timeout') + mainWindow.show() + mainWindow.focus() + } + }, 2500) + + mainWindow.webContents.on('did-finish-load', () => { + console.log('[main] Main window renderer loaded:', mainWindow?.webContents.getURL()) + emitFlowPanelStateChanged() + }) + + mainWindow.webContents.on('did-fail-load', (_e, code, desc, url) => { + console.error('[main] Main window failed to load:', { code, desc, url }) + }) + + mainWindow.on('closed', () => { + mainWindow = null + }) +} + +function createFlowWindow(flowSession: Electron.Session, account: FlowAccount, opts: { focusOnShow?: boolean; revealOnReady?: boolean } = {}) { + console.log('[main] Creating Flow window') + const focusOnShow = opts.focusOnShow === true + const revealOnReady = opts.revealOnReady !== false + flowWindow = new BrowserWindow({ + width: 1420, + height: 900, + minWidth: 1080, + minHeight: 680, + title: account?.label ? 
`Google Flow • ${account.label}` : 'Google Flow', + backgroundColor: '#0a0f1f', + show: false, + webPreferences: { + contextIsolation: true, + nodeIntegration: false, + session: flowSession, + } + }) + + flowWindow.loadURL(FLOW_URL).catch((err) => { + console.error('[main] Failed to load Flow URL:', err) + }) + flowWindow.webContents.on('did-finish-load', () => { + console.log('[main] Flow content loaded:', flowWindow?.webContents.getURL()) + }) + flowWindow.webContents.on('did-fail-load', (_e, code, desc, url) => { + console.error('[main] Flow content failed:', { code, desc, url }) + }) + flowWindow.webContents.setWindowOpenHandler(({ url }) => { + void shell.openExternal(url) + return { action: 'deny' } + }) + flowWindow.once('ready-to-show', () => { + if (!flowWindow || flowWindow.isDestroyed() || !revealOnReady) return + if (focusOnShow) { + flowWindow.show() + createFlowSidebarWindow(true) + layoutFlowWindows() + flowWindow.focus() + return + } + if (typeof flowWindow.showInactive === 'function') flowWindow.showInactive() + else flowWindow.show() + createFlowSidebarWindow(true) + layoutFlowWindows() + if (mainWindow && !mainWindow.isDestroyed()) mainWindow.focus() + }) + + createFlowSidebarWindow(false) + + layoutFlowWindows() + flowWindow.on('move', layoutFlowWindows) + flowWindow.on('resize', layoutFlowWindows) + flowWindow.on('maximize', layoutFlowWindows) + flowWindow.on('unmaximize', layoutFlowWindows) + flowWindow.on('enter-full-screen', layoutFlowWindows) + flowWindow.on('leave-full-screen', layoutFlowWindows) + + flowWindow.on('close', (event) => { + if (appQuitting) return + // Keep Flow session alive for token/captcha; hide instead of destroying. 
+ event.preventDefault() + flowWindow?.hide() + if (flowSidebarWindow && !flowSidebarWindow.isDestroyed()) { + flowSidebarWindow.hide() + } + emitFlowPanelStateChanged() + }) + + flowWindow.on('closed', () => { + if (flowSidebarWindow && !flowSidebarWindow.isDestroyed()) { + flowSidebarWindow.destroy() + } + flowSidebarWindow = null + flowWindow = null + emitFlowPanelStateChanged() + }) + + flowWindow.on('show', () => emitFlowPanelStateChanged()) + flowWindow.on('hide', () => emitFlowPanelStateChanged()) +} + +function createTray() { + try { + const rawIcon = nativeImage.createFromPath(ICON_PATH) + if (rawIcon.isEmpty()) { + console.warn('[main] Tray icon is invalid/empty, skipping tray creation:', ICON_PATH) + return + } + + const icon = rawIcon.resize({ width: 16, height: 16 }) + tray = new Tray(icon) + tray.setToolTip('FlowKit') + } catch (err) { + console.error('[main] Failed to create tray, continue without tray:', err) + tray = null + return + } + + const updateMenu = (agentStatus: string) => { + const menu = Menu.buildFromTemplate([ + { label: `FlowKit — ${agentStatus}`, enabled: false }, + { type: 'separator' }, + { label: 'Open Dashboard', click: () => { mainWindow?.show(); mainWindow?.focus() } }, + { label: 'Open Google Flow', click: () => { void openFlowWindow({ focus: true, reveal: true }) } }, + { type: 'separator' }, + { label: 'Quit', click: () => app.quit() } + ]) + tray?.setContextMenu(menu) + } + + updateMenu('Starting...') + + // Update tray when agent status changes + sidecar.on('status', (status: string) => updateMenu(status)) + + tray.on('click', () => { + mainWindow?.show() + mainWindow?.focus() + }) +} + +function destroyFlowWindowsForAccountSwitch() { + const sidebar = flowSidebarWindow + flowSidebarWindow = null + if (sidebar && !sidebar.isDestroyed()) { + try { sidebar.removeAllListeners('close') } catch { } + try { sidebar.close() } catch { } + try { sidebar.destroy() } catch { } + } + + const flow = flowWindow + flowWindow = null + if 
(flow && !flow.isDestroyed()) { + try { flow.removeAllListeners('close') } catch { } + try { flow.close() } catch { } + try { flow.destroy() } catch { } + } +} + +async function openFlowWindow(options: { focus?: boolean; reveal?: boolean; accountId?: string; forceRecreate?: boolean } = {}) { + console.log('[main] openFlowWindow invoked') + const focus = options.focus === true + const reveal = options.reveal !== false + const requestedAccount = getFlowAccountById(options.accountId) + const desiredPartition = requestedAccount.partition + const previousPartition = flowSessionPartition + + try { + await prepareFlowRuntimeForAccount(requestedAccount.id) + } catch (err) { + console.error('[main] Failed to prepare Flow runtime:', err) + throw err + } + + const sessionChanged = previousPartition !== '' && previousPartition !== desiredPartition + if (sessionChanged || options.forceRecreate) { + destroyFlowWindowsForAccountSwitch() + } + + if (flowWindow && !flowWindow.isDestroyed()) { + if (!reveal) return + if (!flowWindow.isVisible()) { + if (!focus && typeof flowWindow.showInactive === 'function') flowWindow.showInactive() + else flowWindow.show() + } + createFlowSidebarWindow(true) + layoutFlowWindows() + if (focus) flowWindow.focus() + else if (mainWindow && !mainWindow.isDestroyed()) mainWindow.focus() + } else { + const activeAccount = getFlowAccountById(requestedAccount.id) + const flowSession = session.fromPartition(activeAccount.partition) + createFlowWindow(flowSession, activeAccount, { focusOnShow: focus, revealOnReady: reveal }) + } +} + +async function setFlowPanelVisibility(visible: boolean, options?: { persist?: boolean; revealFlowIfNeeded?: boolean }) { + flowSidebarVisible = visible + if (options?.persist !== false) { + saveFlowUIConfig({ + ...flowUIConfig, + sidebarVisible: visible, + }) + } + + if (!visible) { + if (flowSidebarWindow && !flowSidebarWindow.isDestroyed()) { + flowSidebarWindow.hide() + } + emitFlowPanelStateChanged() + return 
getFlowPanelStatePayload() + } + + // Ensure extension/session is prepared before trying to show panel. + try { + await prepareFlowRuntimeForAccount(flowAccountsConfig.activeAccountId) + } catch (err) { + console.error('[main] Failed to prepare Flow runtime while showing panel:', err) + } + + if ((!flowWindow || flowWindow.isDestroyed()) && options?.revealFlowIfNeeded) { + await openFlowWindow({ + focus: false, + reveal: true, + accountId: flowAccountsConfig.activeAccountId, + }) + } + + if (flowWindow && !flowWindow.isDestroyed()) { + // If Flow window exists but hidden, reveal it to anchor sidebar layout. + if (!flowWindow.isVisible()) { + if (typeof flowWindow.showInactive === 'function') flowWindow.showInactive() + else flowWindow.show() + } + createFlowSidebarWindow(true) + layoutFlowWindows() + if (flowSidebarWindow && !flowSidebarWindow.isDestroyed() && !flowSidebarWindow.isVisible()) { + if (typeof flowSidebarWindow.showInactive === 'function') flowSidebarWindow.showInactive() + else flowSidebarWindow.show() + } + } + emitFlowPanelStateChanged() + return getFlowPanelStatePayload() +} + +function getCurrentFlowSession(): Electron.Session { + const account = getFlowAccountById() + const partitionKey = flowSessionPartition || account.partition + return session.fromPartition(partitionKey) +} + +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)) +} + +async function waitForWebContentsReady(webContents: Electron.WebContents | null | undefined, timeoutMs = 8000): Promise { + if (!webContents || webContents.isDestroyed()) return false + if (!webContents.isLoadingMainFrame()) return true + return await new Promise((resolve) => { + let settled = false + const finish = (ok: boolean) => { + if (settled) return + settled = true + clearTimeout(timer) + try { webContents.removeListener('did-finish-load', onFinish) } catch { } + try { webContents.removeListener('did-fail-load', onFail) } catch { } + resolve(ok) + } + const 
onFinish = () => finish(true) + const onFail = () => finish(false) + const timer = setTimeout(() => finish(!webContents.isLoadingMainFrame()), timeoutMs) + webContents.once('did-finish-load', onFinish) + webContents.once('did-fail-load', onFail) + }) +} + +async function waitForExtensionConnected(timeoutMs = 12000): Promise { + const start = Date.now() + while (Date.now() - start < timeoutMs) { + try { + const res = await fetch('http://127.0.0.1:8100/health', { signal: AbortSignal.timeout(1500) }) + if (res.ok) { + const health = await res.json() as { extension_connected?: boolean } + if (Boolean(health?.extension_connected)) return true + } + } catch { + // retry + } + await sleep(500) + } + return false +} + +async function requestExtensionReconnectViaSidebar(): Promise { + const sidebar = createFlowSidebarWindow(false) + if (!sidebar || sidebar.webContents.isDestroyed()) return false + const ready = await waitForWebContentsReady(sidebar.webContents, 6000) + if (!ready) return false + try { + const viaSidePanel = await sidebar.webContents.executeJavaScript(` + new Promise((resolve) => { + try { + chrome.runtime.sendMessage({ type: 'RECONNECT' }, (resp) => { + const err = chrome.runtime.lastError; + if (err) { + resolve({ ok: false, error: err.message || 'runtime_send_failed' }); + return; + } + resolve({ ok: !!(resp && resp.ok), raw: resp || null }); + }); + } catch (e) { + resolve({ ok: false, error: e?.message || String(e) }); + } + }); + `, true) as { ok?: boolean; error?: string } | undefined + return Boolean(viaSidePanel?.ok) + } catch (err) { + console.warn('[main] reconnect via side-panel failed:', err) + return false + } +} + +async function getLicenseConfig() { + return loadLicenseConfig(LICENSE_CONFIG_PATH, DEFAULT_LICENSE_API) +} + +async function performLicenseCheck(force = false): Promise { + if (!force && lastLicenseCheck) { + const elapsed = Date.now() - new Date(lastLicenseCheck.checkedAt).getTime() + if (elapsed < 5000) { + return 
lastLicenseCheck + } + } + + const machineId = await getMachineId() + const config = await getLicenseConfig() + const result = await checkLicense({ + apiBaseUrl: config.apiBaseUrl, + machineId, + cachePath: LICENSE_CACHE_PATH, + appVersion: app.getVersion(), + platform: process.platform, + }) + lastLicenseCheck = result + return result +} + +function stopLicenseEnforcer() { + if (licenseEnforceTimer) { + clearInterval(licenseEnforceTimer) + licenseEnforceTimer = null + } +} + +function pushLicenseStatusToRenderer(result: LicenseCheckResult) { + mainWindow?.webContents.send('license-status-changed', result) +} + +async function enforceLicenseRevocation(source: 'startup' | 'poll'): Promise { + if (licenseEnforceInFlight) return + licenseEnforceInFlight = true + try { + const result = await performLicenseCheck(true) + pushLicenseStatusToRenderer(result) + + if (result.status === 'REVOKED') { + if (!licenseRevokedLockdown) { + console.error('[license] Device revoked. Locking app until re-activated.', { + source, + machineId: result.machineId, + reason: result.revokedReason ?? null, + }) + } + licenseRevokedLockdown = true + sidecar.stop() + + if (!licenseRevokedNotified) { + licenseRevokedNotified = true + const reasonLine = result.revokedReason + ? `\nLý do: ${result.revokedReason}` + : '' + void dialog.showMessageBox(mainWindow ?? undefined, { + type: 'warning', + title: 'FlowKit license đã bị thu hồi (REVOKED)', + message: `Thiết bị này không còn quyền sử dụng FlowKit.${reasonLine}`, + detail: 'Ứng dụng vẫn mở để bạn sao chép Machine ID và yêu cầu admin Active lại.', + buttons: ['Đã hiểu'], + defaultId: 0, + }) + } + return + } + + if (licenseRevokedLockdown) { + console.log('[license] Device re-activated. 
Resuming sidecar.') + licenseRevokedLockdown = false + sidecar.start() + } + licenseRevokedNotified = false + } catch (err) { + console.error('[license] Revoke enforcement check failed:', err) + } finally { + licenseEnforceInFlight = false + } +} + +function startLicenseEnforcer() { + if (licenseEnforceTimer) return + void enforceLicenseRevocation('startup') + licenseEnforceTimer = setInterval(() => { + void enforceLicenseRevocation('poll') + }, LICENSE_REVOKE_POLL_MS) +} + +function listFlowAccountsPayload() { + return { + activeAccountId: flowAccountsConfig.activeAccountId, + accounts: flowAccountsConfig.accounts.map((account) => ({ + id: account.id, + label: account.label, + email: account.email, + partition: account.partition, + createdAt: account.createdAt, + updatedAt: account.updatedAt, + })), + } +} + +function createFlowAccount(payload?: { id?: string; label?: string; email?: string; setActive?: boolean }) { + const label = String(payload?.label ?? '').trim() || `Tài khoản ${flowAccountsConfig.accounts.length + 1}` + let id = normalizeFlowAccountId(payload?.id) || deriveFlowAccountId(label) + if (!id) id = `account-${flowAccountsConfig.accounts.length + 1}` + if (id === FLOW_ACCOUNT_DEFAULT_ID && flowAccountsConfig.accounts.some((a) => a.id === FLOW_ACCOUNT_DEFAULT_ID)) { + id = `account-${flowAccountsConfig.accounts.length + 1}` + } + while (flowAccountsConfig.accounts.some((a) => a.id === id)) { + const suffix = Math.floor(Math.random() * 9000) + 1000 + id = `${id.slice(0, 30)}-${suffix}` + } + const createdAt = nowIso() + const nextAccount: FlowAccount = { + id, + label, + email: String(payload?.email ?? '').trim(), + partition: partitionForAccountId(id), + createdAt, + updatedAt: createdAt, + } + const shouldSetActive = payload?.setActive !== false + saveFlowAccountsConfig({ + activeAccountId: shouldSetActive ? 
nextAccount.id : flowAccountsConfig.activeAccountId, + accounts: [...flowAccountsConfig.accounts, nextAccount], + }) + return listFlowAccountsPayload() +} + +function updateFlowAccount(payload?: { id?: string; label?: string; email?: string }) { + const id = normalizeFlowAccountId(payload?.id) + if (!id) throw new Error('ID tài khoản không hợp lệ') + const index = flowAccountsConfig.accounts.findIndex((a) => a.id === id) + if (index < 0) throw new Error('Không tìm thấy tài khoản') + const current = flowAccountsConfig.accounts[index] + const next: FlowAccount = { + ...current, + label: String(payload?.label ?? '').trim() || current.label, + email: String(payload?.email ?? '').trim(), + updatedAt: nowIso(), + } + const accounts = [...flowAccountsConfig.accounts] + accounts[index] = next + saveFlowAccountsConfig({ ...flowAccountsConfig, accounts }) + return listFlowAccountsPayload() +} + +async function clearFlowAccountSession(accountId?: string | null) { + const account = getFlowAccountById(accountId) + const ses = session.fromPartition(account.partition) + await ses.clearStorageData({ + storages: ['cookies', 'serviceworkers', 'localstorage', 'indexeddb', 'cachestorage'], + }) + await ses.clearCache() + await ses.clearAuthCache() + if (typeof ses.clearHostResolverCache === 'function') await ses.clearHostResolverCache() + if (typeof ses.flushStorageData === 'function') ses.flushStorageData() +} + +async function deleteFlowAccount(accountId?: string | null) { + const id = normalizeFlowAccountId(accountId) + if (!id) throw new Error('ID tài khoản không hợp lệ') + if (flowAccountsConfig.accounts.length <= 1) { + throw new Error('Cần giữ ít nhất một tài khoản') + } + const target = flowAccountsConfig.accounts.find((a) => a.id === id) + if (!target) throw new Error('Không tìm thấy tài khoản') + const remaining = flowAccountsConfig.accounts.filter((a) => a.id !== id) + const nextActive = flowAccountsConfig.activeAccountId === id + ? 
remaining[0].id + : flowAccountsConfig.activeAccountId + + if (flowSessionPartition === target.partition) { + destroyFlowWindowsForAccountSwitch() + flowSessionPartition = '' + flowExtensionId = null + } + await unloadExtensionForPartition(target.partition) + saveFlowAccountsConfig({ activeAccountId: nextActive, accounts: remaining }) + return listFlowAccountsPayload() +} + +async function setActiveFlowAccount(accountId?: string | null, options?: { openFlow?: boolean; focus?: boolean }) { + const account = getFlowAccountById(accountId) + saveFlowAccountsConfig({ + ...flowAccountsConfig, + activeAccountId: account.id, + }) + if (options?.openFlow) { + await openFlowWindow({ + accountId: account.id, + reveal: true, + focus: options.focus ?? true, + forceRecreate: true, + }) + } + return listFlowAccountsPayload() +} + +// ─── IPC Handlers ──────────────────────────────────────────── + +ipcMain.handle('open-flow-tab', async (_event, payload?: { focus?: boolean; reveal?: boolean; accountId?: string }) => { + await openFlowWindow({ + focus: payload?.focus, + reveal: payload?.reveal, + accountId: payload?.accountId, + }) + return getFlowPanelStatePayload() +}) +ipcMain.handle('flow-panel-get-state', () => getFlowPanelStatePayload()) +ipcMain.handle('flow-panel-set-visible', async (_event, payload?: { visible?: boolean; persist?: boolean; revealFlowIfNeeded?: boolean }) => { + return await setFlowPanelVisibility(Boolean(payload?.visible), { + persist: payload?.persist !== false, + revealFlowIfNeeded: payload?.revealFlowIfNeeded === true, + }) +}) +ipcMain.handle('flow-panel-toggle', async () => { + const currentlyVisible = isFlowSidebarActuallyVisible() + const nextVisible = !currentlyVisible + return await setFlowPanelVisibility(nextVisible, { + persist: true, + revealFlowIfNeeded: nextVisible, + }) +}) +ipcMain.handle('flow-accounts-list', () => listFlowAccountsPayload()) +ipcMain.handle('flow-accounts-create', (_event, payload?: { id?: string; label?: string; email?: 
string; setActive?: boolean }) => + createFlowAccount(payload) +) +ipcMain.handle('flow-accounts-update', (_event, payload?: { id?: string; label?: string; email?: string }) => + updateFlowAccount(payload) +) +ipcMain.handle('flow-accounts-delete', async (_event, accountId?: string) => + await deleteFlowAccount(accountId) +) +ipcMain.handle('flow-accounts-set-active', async (_event, payload?: { id?: string; openFlow?: boolean; focus?: boolean }) => + await setActiveFlowAccount(payload?.id, { openFlow: payload?.openFlow, focus: payload?.focus }) +) +ipcMain.handle('flow-accounts-logout', async (_event, payload?: { id?: string; reopenFlow?: boolean; focus?: boolean }) => { + const account = getFlowAccountById(payload?.id) + if (flowSessionPartition === account.partition) { + destroyFlowWindowsForAccountSwitch() + } + await clearFlowAccountSession(account.id) + await unloadExtensionForPartition(account.partition) + flowSessionPartition = '' + flowExtensionId = null + if (payload?.reopenFlow !== false) { + await openFlowWindow({ + accountId: account.id, + reveal: true, + focus: payload?.focus ?? 
true, + forceRecreate: true, + }) + } + return listFlowAccountsPayload() +}) +ipcMain.handle('get-app-info', () => ({ + name: app.getName(), + version: app.getVersion(), +})) +ipcMain.handle('get-health', async () => { + try { + const res = await fetch('http://127.0.0.1:8100/health') + return res.json() + } catch { + return { status: 'error', extension_connected: false } + } +}) +ipcMain.handle('get-machine-id', async () => getMachineId()) +ipcMain.handle('get-license-config', async () => getLicenseConfig()) +ipcMain.handle('set-license-config', async (_event, apiBaseUrl: string) => { + const config = await saveLicenseConfig(LICENSE_CONFIG_PATH, apiBaseUrl, DEFAULT_LICENSE_API) + lastLicenseCheck = null + return config +}) +ipcMain.handle('check-license', async (_event, payload?: { force?: boolean }) => { + const result = await performLicenseCheck(Boolean(payload?.force)) + pushLicenseStatusToRenderer(result) + return result +}) +ipcMain.handle('pick-image-file', async () => { + const result = await dialog.showOpenDialog(mainWindow ?? undefined, { + properties: ['openFile'], + filters: [ + { name: 'Images', extensions: ['png', 'jpg', 'jpeg', 'webp'] }, + { name: 'All Files', extensions: ['*'] }, + ], + }) + if (result.canceled || result.filePaths.length === 0) return null + return result.filePaths[0] +}) +ipcMain.handle('pick-file', async (_event, kind: 'image' | 'audio' | 'video' | 'any' = 'any') => { + const filters = kind === 'image' + ? [ + { name: 'Images', extensions: ['png', 'jpg', 'jpeg', 'webp'] }, + { name: 'All Files', extensions: ['*'] }, + ] + : kind === 'audio' + ? [ + { name: 'Audio', extensions: ['wav', 'mp3', 'm4a', 'aac', 'flac', 'ogg'] }, + { name: 'All Files', extensions: ['*'] }, + ] + : kind === 'video' + ? [ + { name: 'Video', extensions: ['mp4', 'mov', 'mkv', 'webm'] }, + { name: 'All Files', extensions: ['*'] }, + ] + : [{ name: 'All Files', extensions: ['*'] }] + const result = await dialog.showOpenDialog(mainWindow ?? 
undefined, { + properties: ['openFile'], + filters, + }) + if (result.canceled || result.filePaths.length === 0) return null + return result.filePaths[0] +}) +ipcMain.handle('pick-directory', async () => { + const result = await dialog.showOpenDialog(mainWindow ?? undefined, { + properties: ['openDirectory', 'createDirectory'], + }) + if (result.canceled || result.filePaths.length === 0) return null + return result.filePaths[0] +}) +ipcMain.handle('open-path', async (_event, targetPath: string) => { + if (!targetPath) return { ok: false, error: 'Path is required' } + const error = await shell.openPath(targetPath) + if (error) return { ok: false, error } + return { ok: true } +}) +ipcMain.handle('window-minimize', () => { + if (!mainWindow || mainWindow.isDestroyed()) return + mainWindow.minimize() +}) +ipcMain.handle('window-toggle-maximize', () => { + if (!mainWindow || mainWindow.isDestroyed()) return false + if (mainWindow.isMaximized()) mainWindow.unmaximize() + else mainWindow.maximize() + return mainWindow.isMaximized() +}) +ipcMain.handle('window-close', () => { + if (!mainWindow || mainWindow.isDestroyed()) return + mainWindow.close() +}) +ipcMain.handle('window-is-maximized', () => { + if (!mainWindow || mainWindow.isDestroyed()) return false + return mainWindow.isMaximized() +}) + +ipcMain.on('agent-ready', () => { + // Forward to renderer + mainWindow?.webContents.send('agent-status', 'ready') +}) + +ipcMain.handle('reconnect-extension', async () => { + try { + // Ensure Flow window (and extension side panel) is alive. 
+ await openFlowWindow({ focus: false, reveal: false }) + createFlowSidebarWindow(false) + await waitForWebContentsReady(flowSidebarWindow?.webContents, 6000) + await waitForWebContentsReady(flowWindow?.webContents, 6000) + + for (let i = 0; i < 3; i += 1) { + const sent = await requestExtensionReconnectViaSidebar() + if (sent) { + const connected = await waitForExtensionConnected(3500) + if (connected) return { ok: true, method: 'runtimeMessage' } + } + await sleep(600) + } + + // Find the extension background service worker webContents + const allContents = (session.defaultSession as any).getAllWebContents?.() + ?? require('electron').webContents.getAllWebContents() + const bgContents = allContents.find((wc: Electron.WebContents) => { + const url = wc.getURL?.() ?? '' + return flowExtensionId + ? url.includes(`chrome-extension://${flowExtensionId}`) && url.includes('background') + : url.startsWith('chrome-extension://') && url.includes('background') + }) + if (bgContents && !bgContents.isDestroyed()) { + try { + await bgContents.executeJavaScript(` + try { + manualDisconnect = false; + connectToAgent(); + } catch(e) {} + `) + const connected = await waitForExtensionConnected(4500) + if (connected) return { ok: true, method: 'executeJavaScript' } + } catch (err) { + console.warn('[main] reconnect via background worker failed:', err) + } + } + + // Fallback: reload extension via session + if (flowExtensionId) { + const flowSession = getCurrentFlowSession() + const extHost = getExtensionsHost(flowSession) + if (typeof extHost.reloadExtension === 'function') { + await extHost.reloadExtension(flowExtensionId) + // Ensure side panel points to the latest extension runtime. 
+ if (flowSidebarWindow && !flowSidebarWindow.webContents.isDestroyed()) { + const sidePanelUrl = `chrome-extension://${flowExtensionId}/side_panel.html` + await flowSidebarWindow.webContents.loadURL(sidePanelUrl) + await waitForWebContentsReady(flowSidebarWindow.webContents, 6000) + } + const sent = await requestExtensionReconnectViaSidebar() + const connected = sent ? await waitForExtensionConnected(5000) : false + if (connected) return { ok: true, method: 'reloadExtension' } + } + } + return { ok: false, error: 'Extension vẫn OFF sau nhiều lần reconnect' } + } catch (e: any) { + return { ok: false, error: e?.message ?? String(e) } + } +}) + +// ─── App Lifecycle ─────────────────────────────────────────── + +app.whenReady().then(async () => { + // Send status while loading + try { + await prepareFlowRuntimeForAccount(flowAccountsConfig.activeAccountId) + } catch (err) { + console.error('[main] Failed to prepare default Flow runtime:', err) + } + createMainWindow() + createTray() + // Keep Flow window available for captcha/token flows. 
+ try { + await openFlowWindow({ focus: false, reveal: false, accountId: flowAccountsConfig.activeAccountId }) + } catch (err) { + console.error('[main] Failed to auto-open Flow window:', err) + } + + startLicenseEnforcer() + + // Start Python sidecar + sidecar.start() + + sidecar.on('status', (status: string) => { + mainWindow?.webContents.send('agent-status', status) + }) +}) + +app.on('second-instance', () => { + if (mainWindow && !mainWindow.isDestroyed()) { + if (mainWindow.isMinimized()) mainWindow.restore() + mainWindow.show() + mainWindow.focus() + } else { + createMainWindow() + } +}) + +app.on('render-process-gone', (_event, webContents, details) => { + console.error('[main] render-process-gone:', { + reason: details.reason, + exitCode: details.exitCode, + url: webContents.getURL(), + }) +}) + +app.on('child-process-gone', (_event, details) => { + console.error('[main] child-process-gone:', details) +}) + +app.on('window-all-closed', () => { + // Keep running in tray + if (process.platform !== 'darwin') { + // On Windows, only quit when tray is exited + } +}) + +app.on('before-quit', () => { + appQuitting = true + stopLicenseEnforcer() + sidecar.stop() +}) + +app.on('activate', () => { + if (!mainWindow) createMainWindow() + else { + mainWindow.show() + mainWindow.focus() + } +}) diff --git a/desktop/electron/preload.ts b/desktop/electron/preload.ts new file mode 100644 index 0000000..3857964 --- /dev/null +++ b/desktop/electron/preload.ts @@ -0,0 +1,150 @@ +import { contextBridge, ipcRenderer } from 'electron' + +type FlowPanelState = { + visible: boolean + sidebarReady: boolean + flowReady: boolean + requestedVisible?: boolean +} + +contextBridge.exposeInMainWorld('electron', { + /** Open the Google Flow browser window */ + openFlowTab: (options?: { focus?: boolean; reveal?: boolean; accountId?: string }) => ipcRenderer.invoke('open-flow-tab', options) as Promise, + /** Flow extension side panel visibility */ + getFlowPanelState: () => 
ipcRenderer.invoke('flow-panel-get-state') as Promise, + setFlowPanelVisible: (visible: boolean, options?: { revealFlowIfNeeded?: boolean }) => ipcRenderer.invoke('flow-panel-set-visible', { visible, revealFlowIfNeeded: options?.revealFlowIfNeeded === true }) as Promise<{ + visible: boolean + sidebarReady: boolean + flowReady: boolean + requestedVisible?: boolean + }>, + toggleFlowPanel: () => ipcRenderer.invoke('flow-panel-toggle') as Promise, + /** Google Flow account/session profiles */ + flowAccountsList: () => ipcRenderer.invoke('flow-accounts-list') as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + flowAccountsCreate: (payload?: { id?: string; label?: string; email?: string; setActive?: boolean }) => + ipcRenderer.invoke('flow-accounts-create', payload) as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + flowAccountsUpdate: (payload: { id: string; label?: string; email?: string }) => + ipcRenderer.invoke('flow-accounts-update', payload) as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + flowAccountsDelete: (id: string) => + ipcRenderer.invoke('flow-accounts-delete', id) as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + flowAccountsSetActive: (payload: { id: string; openFlow?: boolean; focus?: boolean }) => + ipcRenderer.invoke('flow-accounts-set-active', payload) as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + flowAccountsLogout: (payload: { id: string; reopenFlow?: boolean; 
focus?: boolean }) => + ipcRenderer.invoke('flow-accounts-logout', payload) as Promise<{ + activeAccountId: string + accounts: Array<{ id: string; label: string; email: string; partition: string; createdAt: string; updatedAt: string }> + }>, + /** Basic app info from Electron main process */ + getAppInfo: () => ipcRenderer.invoke('get-app-info') as Promise<{ name: string; version: string }>, + /** Stable machine id used for license activation */ + getMachineId: () => ipcRenderer.invoke('get-machine-id') as Promise, + /** Get/Set license API endpoint */ + getLicenseConfig: () => ipcRenderer.invoke('get-license-config') as Promise<{ apiBaseUrl: string }>, + setLicenseConfig: (apiBaseUrl: string) => ipcRenderer.invoke('set-license-config', apiBaseUrl) as Promise<{ apiBaseUrl: string }>, + /** Check current device license status */ + getLicenseStatus: (force = false) => + ipcRenderer.invoke('check-license', { force }) as Promise<{ + allowed: boolean + status: 'ACTIVE' | 'EXPIRED' | 'REVOKED' | 'PENDING' | 'ERROR' + machineId: string + machineHash: string | null + planCode: string | null + planLabel: string | null + activatedAt: string | null + expiresAt: string | null + revokedReason: string | null + checkedAt: string + serverTime: string | null + source: 'remote' | 'cache' + apiBaseUrl: string + message: string + }>, + + /** Get current health status from Python agent */ + getHealth: () => ipcRenderer.invoke('get-health'), + + /** Reconnect Chrome extension WebSocket to agent */ + reconnectExtension: () => ipcRenderer.invoke('reconnect-extension') as Promise<{ ok: boolean; method?: string; error?: string }>, + + /** Pick a local image file and return absolute path */ + pickImageFile: () => ipcRenderer.invoke('pick-image-file') as Promise, + /** Pick a local file by media kind */ + pickFile: (kind: 'image' | 'audio' | 'video' | 'any' = 'any') => + ipcRenderer.invoke('pick-file', kind) as Promise, + /** Pick a local directory path */ + pickDirectory: () => 
ipcRenderer.invoke('pick-directory') as Promise, + /** Reveal a file/folder path in OS shell */ + openPath: (targetPath: string) => ipcRenderer.invoke('open-path', targetPath) as Promise<{ ok: boolean; error?: string }>, + + /** Subscribe to agent sidecar status updates */ + onAgentStatus: (callback: (status: string) => void) => { + const handler = (_event: Electron.IpcRendererEvent, status: string) => callback(status) + ipcRenderer.on('agent-status', handler) + return () => ipcRenderer.removeListener('agent-status', handler) + }, + onFlowPanelStateChanged: (callback: (state: FlowPanelState) => void) => { + const handler = (_event: Electron.IpcRendererEvent, state: FlowPanelState) => callback(state) + ipcRenderer.on('flow-panel-state-changed', handler) + return () => ipcRenderer.removeListener('flow-panel-state-changed', handler) + }, + /** Subscribe to license status changes pushed by main process */ + onLicenseStatusChanged: (callback: (status: { + allowed: boolean + status: 'ACTIVE' | 'EXPIRED' | 'REVOKED' | 'PENDING' | 'ERROR' + machineId: string + machineHash: string | null + planCode: string | null + planLabel: string | null + activatedAt: string | null + expiresAt: string | null + revokedReason: string | null + checkedAt: string + serverTime: string | null + source: 'remote' | 'cache' + apiBaseUrl: string + message: string + }) => void) => { + const handler = (_event: Electron.IpcRendererEvent, status: { + allowed: boolean + status: 'ACTIVE' | 'EXPIRED' | 'REVOKED' | 'PENDING' | 'ERROR' + machineId: string + machineHash: string | null + planCode: string | null + planLabel: string | null + activatedAt: string | null + expiresAt: string | null + revokedReason: string | null + checkedAt: string + serverTime: string | null + source: 'remote' | 'cache' + apiBaseUrl: string + message: string + }) => callback(status) + ipcRenderer.on('license-status-changed', handler) + return () => ipcRenderer.removeListener('license-status-changed', handler) + }, + + /** Platform 
 info */ + platform: process.platform, + + /** Window controls */ + windowMinimize: () => ipcRenderer.invoke('window-minimize'), + windowToggleMaximize: () => ipcRenderer.invoke('window-toggle-maximize'), + windowClose: () => ipcRenderer.invoke('window-close'), + isWindowMaximized: () => ipcRenderer.invoke('window-is-maximized') as Promise, +}) diff --git a/desktop/electron/sidecar.ts b/desktop/electron/sidecar.ts new file mode 100644 index 0000000..6d95b5b --- /dev/null +++ b/desktop/electron/sidecar.ts @@ -0,0 +1,519 @@ +import { ChildProcess, execSync, spawn } from 'child_process' +import { delimiter, join } from 'path' +import { existsSync } from 'fs' +import { app } from 'electron' +import { EventEmitter } from 'events' + +const AGENT_PORT = 8100 +const HEALTH_URL = `http://127.0.0.1:${AGENT_PORT}/health` +const MANUAL_CONTEXT_URL = `http://127.0.0.1:${AGENT_PORT}/api/flow/manual/context` +const DOWNLOAD_ASSETS_PROBE_URL = `http://127.0.0.1:${AGENT_PORT}/api/videos/__feature_probe__/download-assets` +const SCRIPT_EXPORT_PROBE_URL = `http://127.0.0.1:${AGENT_PORT}/api/videos/__feature_probe__/script-export` +const SCRIPT_IMPORT_PROBE_URL = `http://127.0.0.1:${AGENT_PORT}/api/videos/__feature_probe__/script-import` +const MAX_RETRIES = 5 +const RESTART_DELAY_MS = 3000 +const HEALTH_POLL_MS = 600 +const HEALTH_TIMEOUT_MS = 20000 + +class Sidecar extends EventEmitter { + private process: ChildProcess | null = null + private stopping = false + private restartCount = 0 + /** Set to true when the process exits, so _waitForReady knows to bail out */ + private abortReady = false + private resolvingPortConflict = false + + async start() { + this.stopping = false + // If port is already occupied by an external instance, adopt only if compatible. 
+ const alreadyUp = await this._checkHealth() + if (alreadyUp) { + const compatible = await this._checkCompatibility() + if (compatible) { + const preferred = this._isPreferredAgentOnPort(AGENT_PORT) + if (preferred) { + console.log('[sidecar] External compatible agent already running — adopting.') + this.emit('status', 'Ready') + return + } + + console.warn('[sidecar] Compatible agent detected on :8100 but not preferred runtime — replacing it.') + const replaced = await this._replaceIncompatibleProcess() + if (!replaced) { + this.emit('status', 'Error — port 8100 occupied by external compatible process') + return + } + this._spawn() + return + } + + console.warn('[sidecar] External agent on :8100 is incompatible — trying to replace it.') + const replaced = await this._replaceIncompatibleProcess() + if (!replaced) { + this.emit('status', 'Error — port 8100 occupied by incompatible process') + return + } + } + this._spawn() + } + + stop() { + this.stopping = true + if (this.process) { + console.log('[sidecar] Stopping Python agent...') + this.process.kill('SIGTERM') + setTimeout(() => this.process?.kill('SIGKILL'), 3000) + this.process = null + } + } + + private _spawn() { + const { bin, args, cwd } = this._resolveCommand() + if (!existsSync(bin)) { + const missingName = process.platform === 'win32' ? 
'flowkit-agent.exe' : 'flowkit-agent' + console.error('[sidecar] Agent binary missing:', { bin, cwd }) + this.emit('status', `Error — missing ${missingName}`) + return + } + console.log('[sidecar] Spawning:', bin, args.join(' '), 'cwd:', cwd) + + this.abortReady = false + this.emit('status', 'Starting...') + + const sidecarEnv: NodeJS.ProcessEnv = { ...process.env, PYTHONDONTWRITEBYTECODE: '1' } + this._injectLocalUpscaleRuntimeEnv(sidecarEnv) + + this.process = spawn(bin, args, { + cwd, + env: sidecarEnv, + stdio: ['ignore', 'pipe', 'pipe'] + }) + + this.process.on('error', async (err: NodeJS.ErrnoException) => { + console.error('[sidecar] Spawn failed:', err) + this.abortReady = true + + // If another agent is already healthy, adopt it instead of failing hard. + const alreadyUp = await this._checkHealth() + if (alreadyUp) { + const compatible = await this._checkCompatibility() + if (compatible) { + console.log('[sidecar] Spawn failed but external compatible agent is healthy — adopting.') + this.emit('status', 'Ready') + return + } + + console.warn('[sidecar] Spawn failed and external agent is incompatible — trying to replace.') + const replaced = await this._replaceIncompatibleProcess() + if (replaced) { + this.stopping = false + this.abortReady = false + this._spawn() + return + } + } + + const hint = err.code === 'ENOENT' + ? 'agent binary/python not found' + : (String(err?.message || '').toLowerCase().includes('not a valid win32 application') + ? 'invalid windows agent binary' + : (err.code || 'spawn error')) + this.emit('status', `Error — ${hint}`) + }) + + this.process.stdout?.on('data', (d: Buffer) => { + process.stdout.write(`[agent] ${d}`) + }) + this.process.stderr?.on('data', (d: Buffer) => { + process.stderr.write(`[agent] ${d}`) + // Detect port-already-in-use and try to replace incompatible stale agent. 
+ const msg = d.toString() + if (/address already in use|eaddrinuse/i.test(msg) && !this.resolvingPortConflict) { + this.resolvingPortConflict = true + this.abortReady = true // cancel _waitForReady loop while resolving conflict + void this._handlePortConflict() + } + }) + + this.process.on('exit', (code) => { + console.log(`[sidecar] Process exited with code ${code}`) + this.abortReady = true // stop _waitForReady polling + this.process = null + + if (!this.stopping) { + this.emit('status', `Stopped (exit ${code})`) + if (this.restartCount < MAX_RETRIES) { + this.restartCount++ + console.log(`[sidecar] Restarting in ${RESTART_DELAY_MS}ms (attempt ${this.restartCount}/${MAX_RETRIES})`) + setTimeout(() => { + if (!this.stopping) { + this.abortReady = false + this._spawn() + } + }, RESTART_DELAY_MS) + } else { + this.emit('status', 'Error — max restarts reached') + } + } + }) + + // Poll health until ready (or process exits) + this._waitForReady() + } + + private async _handlePortConflict() { + try { + console.warn('[sidecar] Port 8100 already in use — checking compatibility.') + this.stopping = true // suppress auto-restart while we resolve conflict + + // Give current process a moment to exit after bind failure. 
+ await this._sleep(500) + + const healthy = await this._checkHealth() + if (healthy) { + const compatible = await this._checkCompatibility() + if (compatible) { + console.log('[sidecar] External compatible agent detected — adopting.') + this.emit('status', 'Ready') + return + } + } + + const replaced = await this._replaceIncompatibleProcess() + if (!replaced) { + this.emit('status', 'Error — port 8100 occupied by incompatible process') + return + } + + console.log('[sidecar] Incompatible process removed — restarting sidecar.') + this.stopping = false + this.abortReady = false + this._spawn() + } finally { + this.resolvingPortConflict = false + } + } + + private async _waitForReady() { + const deadline = Date.now() + HEALTH_TIMEOUT_MS + while (Date.now() < deadline) { + if (this.abortReady) { + console.log('[sidecar] _waitForReady aborted (process exited or port adopted)') + return + } + const ok = await this._checkHealth() + if (ok) { + if (!this.abortReady) { + console.log('[sidecar] Agent is ready') + this.restartCount = 0 + this.emit('status', 'Ready') + } + return + } + await new Promise(r => setTimeout(r, HEALTH_POLL_MS)) + } + if (!this.abortReady) { + console.error('[sidecar] Agent failed to start within timeout') + this.emit('status', 'Error — timeout') + } + } + + private async _checkHealth(): Promise { + try { + const res = await fetch(HEALTH_URL, { signal: AbortSignal.timeout(1500) }) + return res.ok + } catch { + return false + } + } + + private async _checkCompatibility(): Promise { + try { + const manualContextRes = await fetch(MANUAL_CONTEXT_URL, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ create_if_missing: false }), + signal: AbortSignal.timeout(2000), + }) + // Endpoint exists => 200/4xx/5xx except 404. + if (manualContextRes.status === 404) return false + + // Route shape probe for new local-download feature. 
+ // Existing route should return 405 on GET (method not allowed), + // but never 404 when endpoint is present. + const downloadProbeRes = await fetch(DOWNLOAD_ASSETS_PROBE_URL, { + method: 'GET', + signal: AbortSignal.timeout(2000), + }) + if (downloadProbeRes.status === 404) return false + + // Script export route probe. + // Expected behavior with dummy vid: 404 + {"detail":"Video not found"}. + const scriptExportRes = await fetch(SCRIPT_EXPORT_PROBE_URL, { + method: 'GET', + signal: AbortSignal.timeout(2000), + }) + if (scriptExportRes.status === 404) { + const raw = await scriptExportRes.text().catch(() => '') + const isVideoNotFound = raw.includes('Video not found') + if (!isVideoNotFound) return false + } else if (!scriptExportRes.ok && scriptExportRes.status !== 400 && scriptExportRes.status !== 422) { + // Allow non-2xx but reject hard routing/transport incompatibilities. + return false + } + + // Script import route probe. + // Existing route should return 405 on GET (method not allowed), never 404. 
+ const scriptImportRes = await fetch(SCRIPT_IMPORT_PROBE_URL, { + method: 'GET', + signal: AbortSignal.timeout(2000), + }) + if (scriptImportRes.status === 404) return false + + return true + } catch { + return false + } + } + + private async _replaceIncompatibleProcess(): Promise { + const pids = this._listListeningPids(AGENT_PORT) + if (pids.length === 0) return true + + for (const pid of pids) { + const cmd = this._readPidCommand(pid) + if (!this._isLikelyFlowKitAgentCommand(cmd)) { + console.error(`[sidecar] Refusing to kill non-FlowKit process on :${AGENT_PORT} (pid=${pid}, cmd="${cmd}")`) + return false + } + } + + for (const pid of pids) { + try { + console.warn(`[sidecar] Terminating stale FlowKit agent pid=${pid}`) + process.kill(pid, 'SIGTERM') + } catch (err) { + console.error(`[sidecar] Failed to SIGTERM pid=${pid}:`, err) + } + } + + await this._sleep(1200) + let remaining = this._listListeningPids(AGENT_PORT) + if (remaining.length > 0) { + for (const pid of remaining) { + try { + console.warn(`[sidecar] Force killing stale FlowKit agent pid=${pid}`) + process.kill(pid, 'SIGKILL') + } catch (err) { + console.error(`[sidecar] Failed to SIGKILL pid=${pid}:`, err) + } + } + } + + // Some macOS processes linger briefly after SIGKILL. Wait a bit longer before + // declaring failure, and only fail hard if a non-FlowKit process is still binding. 
+ for (let attempt = 0; attempt < 8; attempt += 1) { + await this._sleep(400) + remaining = this._listListeningPids(AGENT_PORT) + if (remaining.length === 0) return true + const nonFlowKit = remaining.find((pid) => !this._isLikelyFlowKitAgentCommand(this._readPidCommand(pid))) + if (nonFlowKit) { + console.error(`[sidecar] Port ${AGENT_PORT} still occupied by non-FlowKit process pid=${nonFlowKit}`) + return false + } + if (attempt >= 3) { + for (const pid of remaining) { + try { process.kill(pid, 'SIGKILL') } catch { /* noop */ } + } + } + } + + return this._listListeningPids(AGENT_PORT).length === 0 + } + + private _listListeningPids(port: number): number[] { + if (process.platform === 'win32') { + try { + const out = execSync(`netstat -ano -p tcp | findstr LISTENING | findstr :${port}`, { encoding: 'utf8' }).trim() + if (!out) return [] + const pids = out + .split(/\r?\n/) + .map((line) => line.trim().split(/\s+/)) + .filter((parts) => parts.length >= 5) + .map((parts) => Number.parseInt(parts[parts.length - 1], 10)) + .filter((pid) => Number.isFinite(pid) && pid > 0) + return Array.from(new Set(pids)) + } catch { + return [] + } + } + try { + const out = execSync(`lsof -n -P -ti tcp:${port} -sTCP:LISTEN`, { encoding: 'utf8' }).trim() + if (!out) return [] + return out + .split('\n') + .map(v => Number.parseInt(v.trim(), 10)) + .filter(v => Number.isFinite(v) && v > 0) + } catch { + return [] + } + } + + private _readPidCommand(pid: number): string { + if (!Number.isFinite(pid) || pid <= 0) return '' + if (process.platform === 'win32') { + try { + const cmd = execSync( + `powershell -NoProfile -Command "(Get-CimInstance Win32_Process -Filter \\"ProcessId=${pid}\\").CommandLine"`, + { encoding: 'utf8' }, + ).trim() + if (cmd) return cmd + } catch { /* noop */ } + try { + const out = execSync(`tasklist /FI "PID eq ${pid}" /FO LIST`, { encoding: 'utf8' }).trim() + return out + } catch { + return '' + } + } + try { + return execSync(`ps -p ${pid} -o command=`, { 
encoding: 'utf8' }).trim() + } catch { + return '' + } + } + + private _isLikelyFlowKitAgentCommand(cmd: string): boolean { + const lower = cmd.toLowerCase() + return lower.includes('agent.main') || lower.includes('flowkit-agent') + } + + private _isPreferredAgentOnPort(port: number): boolean { + const pids = this._listListeningPids(port) + if (pids.length === 0) return false + return pids.some((pid) => this._isPreferredAgentCommand(this._readPidCommand(pid))) + } + + private _isPreferredAgentCommand(cmd: string): boolean { + const lower = cmd.toLowerCase() + if (!lower) return false + + if (app.isPackaged) { + return lower.includes('flowkit-agent') + } + + const projectRoot = join(__dirname, '../../..') + const expectedPython = process.platform === 'win32' + ? join(projectRoot, 'venv', 'Scripts', 'python.exe') + : join(projectRoot, 'venv', 'bin', 'python3') + + return lower.includes(expectedPython.toLowerCase()) && lower.includes('agent.main') + } + + private _sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) + } + + private _resolveCommand(): { bin: string; args: string[]; cwd: string } { + if (app.isPackaged) { + const ext = process.platform === 'win32' ? '.exe' : '' + const candidates = [ + join(process.resourcesPath, 'agent', `flowkit-agent${ext}`), + join(process.resourcesPath, 'agent', 'flowkit-agent'), + join(process.resourcesPath, `flowkit-agent${ext}`), + join(process.resourcesPath, 'flowkit-agent'), + ] + const bin = candidates.find((p) => existsSync(p)) || candidates[0] + return { bin, args: [], cwd: process.resourcesPath } + } else { + const projectRoot = join(__dirname, '../../..') + const venvPython = process.platform === 'win32' + ? 
join(projectRoot, 'venv', 'Scripts', 'python.exe') + : join(projectRoot, 'venv', 'bin', 'python3') + return { + bin: venvPython, + args: ['-m', 'agent.main'], + cwd: projectRoot + } + } + } + + private _injectLocalUpscaleRuntimeEnv(sidecarEnv: NodeJS.ProcessEnv) { + const projectRoot = join(__dirname, '../../..') + const platformFolder = process.platform === 'win32' + ? 'win32' + : (process.platform === 'darwin' ? 'darwin' : process.platform) + const runtimeBaseRoots = app.isPackaged + ? [ + join(process.resourcesPath, 'agent', 'third_party'), + join(process.resourcesPath, 'third_party'), + ] + : [ + join(projectRoot, 'third_party'), + join(projectRoot, 'desktop', 'resources', 'agent', 'third_party'), + ] + const runtimeRoots = runtimeBaseRoots.flatMap((baseRoot) => [ + join(baseRoot, platformFolder), + baseRoot, + ]) + + const ffmpegName = process.platform === 'win32' ? 'ffmpeg.exe' : 'ffmpeg' + const ffprobeName = process.platform === 'win32' ? 'ffprobe.exe' : 'ffprobe' + const realesrganName = process.platform === 'win32' ? 
'realesrgan-ncnn-vulkan.exe' : 'realesrgan-ncnn-vulkan' + const modelParam = 'realesrgan-x4plus.param' + const modelBin = 'realesrgan-x4plus.bin' + + let chosenRoot = '' + for (const root of runtimeRoots) { + const ffmpegBin = join(root, 'ffmpeg', ffmpegName) + const ffprobeBin = join(root, 'ffmpeg', ffprobeName) + const realesrganBin = join(root, 'realesrgan', realesrganName) + const modelDir = join(root, 'realesrgan', 'models') + const hasAll = + existsSync(ffmpegBin) && + existsSync(ffprobeBin) && + existsSync(realesrganBin) && + existsSync(join(modelDir, modelParam)) && + existsSync(join(modelDir, modelBin)) + if (hasAll) { + chosenRoot = root + break + } + } + + if (!chosenRoot) { + chosenRoot = runtimeRoots.find((root) => existsSync(root)) || '' + } + if (!chosenRoot) return + + const ffmpegDir = join(chosenRoot, 'ffmpeg') + const realesrganDir = join(chosenRoot, 'realesrgan') + const ffmpegBin = join(ffmpegDir, ffmpegName) + const ffprobeBin = join(ffmpegDir, ffprobeName) + const realesrganBin = join(realesrganDir, realesrganName) + const modelDir = join(realesrganDir, 'models') + + if (existsSync(ffmpegBin)) sidecarEnv.LOCAL_UPSCALE_FFMPEG = ffmpegBin + if (existsSync(ffprobeBin)) sidecarEnv.LOCAL_UPSCALE_FFPROBE = ffprobeBin + if (existsSync(realesrganBin)) sidecarEnv.LOCAL_UPSCALE_BIN = realesrganBin + if (existsSync(join(modelDir, modelParam)) && existsSync(join(modelDir, modelBin))) { + sidecarEnv.LOCAL_UPSCALE_MODEL_DIR = modelDir + } + sidecarEnv.LOCAL_UPSCALE_RUNTIME_ROOT = chosenRoot + + const pathParts: string[] = [] + if (existsSync(ffmpegDir)) pathParts.push(ffmpegDir) + if (existsSync(realesrganDir)) pathParts.push(realesrganDir) + const currentPath = process.env.PATH ?? process.env.Path ?? 
'' + if (currentPath) pathParts.push(currentPath) + if (pathParts.length > 0) { + const mergedPath = pathParts.join(delimiter) + sidecarEnv.PATH = mergedPath + sidecarEnv.Path = mergedPath + } + + console.log('[sidecar] Local upscale runtime root:', chosenRoot) + } +} + +export const sidecar = new Sidecar() diff --git a/desktop/package-lock.json b/desktop/package-lock.json new file mode 100644 index 0000000..f73c120 --- /dev/null +++ b/desktop/package-lock.json @@ -0,0 +1,9265 @@ +{ + "name": "flowkit-desktop", + "version": "0.2.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "flowkit-desktop", + "version": "0.2.0", + "dependencies": { + "@fontsource/inter": "^5.2.8", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": "^2.1.16", + "@radix-ui/react-label": "^2.1.8", + "@radix-ui/react-popover": "^1.1.15", + "@radix-ui/react-progress": "^1.1.8", + "@radix-ui/react-scroll-area": "^1.2.10", + "@radix-ui/react-select": "^2.2.6", + "@radix-ui/react-separator": "^1.1.8", + "@radix-ui/react-slot": "^1.2.4", + "@radix-ui/react-switch": "^1.2.6", + "@radix-ui/react-tabs": "^1.1.13", + "@radix-ui/react-toast": "^1.2.15", + "@radix-ui/react-tooltip": "^1.2.8", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "electron-updater": "^6.3.4", + "lucide-react": "^1.7.0", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-router-dom": "^7.14.0", + "tailwind-merge": "^3.5.0" + }, + "devDependencies": { + "@electron-toolkit/preload": "^3.0.1", + "@electron-toolkit/utils": "^3.0.0", + "@tailwindcss/vite": "^4.2.2", + "@types/node": "^24.12.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^4.3.4", + "electron": "^36.1.0", + "electron-builder": "^25.1.8", + "electron-vite": "^2.3.0", + "tailwindcss": "^4.2.2", + "typescript": "~5.9.3", + "vite": "^6.3.3" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", 
+ "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + 
"@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.29.2.tgz", + "integrity": "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/template": "^7.28.6", + "@babel/types": "^7.29.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", + "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@develar/schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/@develar/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + }, + 
"engines": { + "node": ">= 8.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/@electron-toolkit/preload": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@electron-toolkit/preload/-/preload-3.0.2.tgz", + "integrity": "sha512-TWWPToXd8qPRfSXwzf5KVhpXMfONaUuRAZJHsKthKgZR/+LqX1dZVSSClQ8OTAEduvLGdecljCsoT2jSshfoUg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "electron": ">=13.0.0" + } + }, + "node_modules/@electron-toolkit/utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@electron-toolkit/utils/-/utils-3.0.0.tgz", + "integrity": "sha512-GaXHDhiT7KCvMJjXdp/QqpYinq69T/Pdl49Z1XLf8mKGf63dnsODMWyrmIjEQ0z/vG7dO8qF3fvmI6Eb2lUNZA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "electron": ">=13.0.0" + } + }, + "node_modules/@electron/asar": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@electron/asar/-/asar-3.4.1.tgz", + "integrity": "sha512-i4/rNPRS84t0vSRa2HorerGRXWyF4vThfHesw0dmcWHp+cspK743UanA0suA5Q5y8kzY2y6YKrvbIUn69BCAiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "commander": "^5.0.0", + "glob": "^7.1.6", + "minimatch": "^3.0.4" + }, + "bin": { + "asar": "bin/asar.js" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/@electron/asar/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@electron/asar/node_modules/brace-expansion": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz", + "integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@electron/asar/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@electron/get": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@electron/get/-/get-2.0.3.tgz", + "integrity": "sha512-Qkzpg2s9GnVV2I2BjRksUi43U5e6+zaQMcjoJy0C+C5oxaKl+fmckGDQFtRpZpZV0NQekuZZ+tGz7EA9TVnQtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.1", + "env-paths": "^2.2.0", + "fs-extra": "^8.1.0", + "got": "^11.8.5", + "progress": "^2.0.3", + "semver": "^6.2.0", + "sumchecker": "^3.0.1" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "global-agent": "^3.0.0" + } + }, + "node_modules/@electron/notarize": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@electron/notarize/-/notarize-2.5.0.tgz", + "integrity": "sha512-jNT8nwH1f9X5GEITXaQ8IF/KdskvIkOFfB2CvwumsveVidzpSc+mvhhTMdAGSYF3O+Nq49lJ7y+ssODRXu06+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.1", + "fs-extra": "^9.0.1", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@electron/notarize/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/@electron/notarize/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@electron/notarize/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@electron/osx-sign": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@electron/osx-sign/-/osx-sign-1.3.1.tgz", + "integrity": "sha512-BAfviURMHpmb1Yb50YbCxnOY0wfwaLXH5KJ4+80zS0gUkzDX3ec23naTlEqKsN+PwYn+a1cCzM7BJ4Wcd3sGzw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "compare-version": "^0.1.2", + "debug": "^4.3.4", + "fs-extra": "^10.0.0", + "isbinaryfile": "^4.0.8", + "minimist": "^1.2.6", + "plist": "^3.0.5" + }, + "bin": { + "electron-osx-flat": "bin/electron-osx-flat.js", + "electron-osx-sign": "bin/electron-osx-sign.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@electron/osx-sign/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@electron/osx-sign/node_modules/isbinaryfile": { + "version": "4.0.10", + "resolved": 
"https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-4.0.10.tgz", + "integrity": "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/gjtorikian/" + } + }, + "node_modules/@electron/osx-sign/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@electron/osx-sign/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@electron/rebuild": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@electron/rebuild/-/rebuild-3.6.1.tgz", + "integrity": "sha512-f6596ZHpEq/YskUd8emYvOUne89ij8mQgjYFA5ru25QwbrRO+t1SImofdDv7kKOuWCmVOuU5tvfkbgGxIl3E/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@malept/cross-spawn-promise": "^2.0.0", + "chalk": "^4.0.0", + "debug": "^4.1.1", + "detect-libc": "^2.0.1", + "fs-extra": "^10.0.0", + "got": "^11.7.0", + "node-abi": "^3.45.0", + "node-api-version": "^0.2.0", + "node-gyp": "^9.0.0", + "ora": "^5.1.0", + "read-binary-file-arch": "^1.0.6", + "semver": "^7.3.5", + "tar": "^6.0.5", + "yargs": "^17.0.1" + }, + "bin": { + "electron-rebuild": "lib/cli.js" + }, + "engines": { + "node": ">=12.13.0" + } + }, + "node_modules/@electron/rebuild/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@electron/rebuild/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@electron/rebuild/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@electron/rebuild/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@electron/universal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@electron/universal/-/universal-2.0.1.tgz", + "integrity": "sha512-fKpv9kg4SPmt+hY7SVBnIYULE9QJl8L3sCfcBsnqbJwwBwAeTLokJ9TRt9y7bK0JAzIW2y78TVVjvnQEms/yyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@electron/asar": "^3.2.7", + "@malept/cross-spawn-promise": "^2.0.0", + "debug": "^4.3.1", + "dir-compare": "^4.2.0", + "fs-extra": "^11.1.1", + "minimatch": 
"^9.0.3", + "plist": "^3.1.0" + }, + "engines": { + "node": ">=16.4" + } + }, + "node_modules/@electron/universal/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@electron/universal/node_modules/brace-expansion": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@electron/universal/node_modules/fs-extra": { + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/@electron/universal/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@electron/universal/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": 
{ + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@electron/universal/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": 
"sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + 
} + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", + "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", + "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", + "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": 
"sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.5.tgz", + "integrity": "sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.11" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.6", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.6.tgz", + "integrity": 
"sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.5", + "@floating-ui/utils": "^0.2.11" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.8.tgz", + "integrity": "sha512-cC52bHwM/n/CxS87FH0yWdngEZrjdtLW/qVruo68qg+prK7ZQ4YGdut2GyDVpoGeAYe/h899rVeOVm6Oi40k2A==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.6" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.11.tgz", + "integrity": "sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==", + "license": "MIT" + }, + "node_modules/@fontsource/inter": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz", + "integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==", + "license": "OFL-1.1", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": 
"npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz", + 
"integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.2.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@malept/cross-spawn-promise": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@malept/cross-spawn-promise/-/cross-spawn-promise-2.0.0.tgz", + "integrity": "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/malept" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/subscription/pkg/npm-.malept-cross-spawn-promise?utm_medium=referral&utm_source=npm_fund" + } + ], + "license": "Apache-2.0", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/@malept/flatpak-bundler": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@malept/flatpak-bundler/-/flatpak-bundler-0.4.0.tgz", + "integrity": "sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.1", + "fs-extra": "^9.0.0", + "lodash": "^4.17.15", + "tmp-promise": "^3.0.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@malept/flatpak-bundler/node_modules/fs-extra": { + 
"version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@malept/flatpak-bundler/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@malept/flatpak-bundler/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@npmcli/fs/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } 
+ }, + "node_modules/@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "license": "MIT", + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@radix-ui/number": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", + "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", + "license": "MIT" + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + 
"peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + 
"optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": 
"sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", + "integrity": 
"sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + 
"@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.8.tgz", + "integrity": "sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + 
} + }, + "node_modules/@radix-ui/react-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", + "integrity": "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover": { + "version": "1.1.15", 
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz", + "integrity": "sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": 
"sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + 
"@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.8.tgz", + "integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==", + "license": "MIT", + "dependencies": { + 
"@radix-ui/react-context": "1.1.3", + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.3.tgz", + "integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "license": "MIT", + "dependencies": { + 
"@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.10.tgz", + "integrity": "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.6.tgz", + "integrity": "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==", + 
"license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.8.tgz", + "integrity": 
"sha512-sDvqVY4itsKwwSMEe0jtKgfTh+72Sy3gPmQpjqcQneqQ4PFmr/1I0YA+2/puilhggCe2gJcx5EBAYFkWkdpa5g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", + "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-switch": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz", + "integrity": 
"sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.13.tgz", + "integrity": "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toast": { + "version": "1.2.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toast/-/react-toast-1.2.15.tgz", + "integrity": "sha512-3OSz3TacUWy4WtOXV38DggwxoqJK4+eDkNMl5Z/MJZaoUPaP4/9lf81xXMe1I2ReTAptverZUpbPY4wWwWyL5g==", 
+ "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", + "integrity": "sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + 
"optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": 
"sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-previous": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", + "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "license": "MIT", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", + "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": 
"sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", + "license": "MIT" + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.1.tgz", + "integrity": "sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.1.tgz", + "integrity": "sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.1.tgz", + "integrity": "sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.1.tgz", + "integrity": "sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==", + "cpu": [ + "x64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.1.tgz", + "integrity": "sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.1.tgz", + "integrity": "sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.1.tgz", + "integrity": "sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.1.tgz", + "integrity": "sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.1.tgz", + "integrity": "sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==", 
+ "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.1.tgz", + "integrity": "sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.1.tgz", + "integrity": "sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.1.tgz", + "integrity": "sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.1.tgz", + "integrity": "sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.1.tgz", + "integrity": 
"sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.1.tgz", + "integrity": "sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.1.tgz", + "integrity": "sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.1.tgz", + "integrity": "sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz", + "integrity": "sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.1.tgz", + "integrity": "sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.1.tgz", + "integrity": "sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.1.tgz", + "integrity": "sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.1.tgz", + "integrity": "sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.1.tgz", + "integrity": "sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + 
"node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.1.tgz", + "integrity": "sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.1.tgz", + "integrity": "sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sindresorhus/is": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@szmarczak/http-timer": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", + "dev": true, + "license": "MIT", + "dependencies": { + "defer-to-connect": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.2.tgz", + "integrity": "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "enhanced-resolve": "^5.19.0", + "jiti": 
"^2.6.1", + "lightningcss": "1.32.0", + "magic-string": "^0.30.21", + "source-map-js": "^1.2.1", + "tailwindcss": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.2.tgz", + "integrity": "sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-x64": "4.2.2", + "@tailwindcss/oxide-freebsd-x64": "4.2.2", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.2", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.2", + "@tailwindcss/oxide-linux-x64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-x64-musl": "4.2.2", + "@tailwindcss/oxide-wasm32-wasi": "4.2.2", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.2", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.2.tgz", + "integrity": "sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.2.tgz", + "integrity": "sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + 
"node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.2.tgz", + "integrity": "sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.2.tgz", + "integrity": "sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.2.tgz", + "integrity": "sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.2.tgz", + "integrity": "sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.2.tgz", + "integrity": 
"sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.2.tgz", + "integrity": "sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.2.tgz", + "integrity": "sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.2.tgz", + "integrity": "sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.8.1", + "@emnapi/runtime": "^1.8.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.1", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.2.2", 
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.2.tgz", + "integrity": "sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.2.tgz", + "integrity": "sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.2.2.tgz", + "integrity": "sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.2.2", + "@tailwindcss/oxide": "4.2.2", + "tailwindcss": "4.2.2" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7 || ^8" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": 
"^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/cacheable-request": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", + "integrity": "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-cache-semantics": "*", + "@types/keyv": "^3.1.4", + "@types/node": "*", + "@types/responselike": "^1.0.0" + } + }, + "node_modules/@types/debug": { + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.13.tgz", + "integrity": "sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/fs-extra": { + "version": "9.0.13", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", + "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-L3LgimLHXtGkWikKnsPg0/VFx9OGZaC+eN1u4r+OB1XRqH3meBIAVC2zr1WdMH+RHmnRkqliQAOHNJ/E0j/e0Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/keyv": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", + "integrity": "sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.12.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.12.2.tgz", + "integrity": "sha512-A1sre26ke7HDIuY/M23nd9gfB+nrmhtYyMINbjI1zHJxYteKR6qSMX56FsmjMcDb3SMcjJg5BiRRgOCC/yBD0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/plist": { + "version": "3.0.5", + 
"resolved": "https://registry.npmjs.org/@types/plist/-/plist-3.0.5.tgz", + "integrity": "sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@types/node": "*", + "xmlbuilder": ">=11.0.1" + } + }, + "node_modules/@types/react": { + "version": "19.2.14", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", + "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.2.0" + } + }, + "node_modules/@types/responselike": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", + "integrity": "sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/verror": { + "version": "1.10.11", + "resolved": "https://registry.npmjs.org/@types/verror/-/verror-1.10.11.tgz", + "integrity": "sha512-RlDm9K7+o5stv0Co8i8ZRGxDbrTxhJtgjqjFyVh/tXQyl/rYtTKlnTvZ88oSTeYREWurwx20Js4kTuKCsFkUtg==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/@types/yauzl": { + "version": "2.10.3", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", + "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + 
"@types/node": "*" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@xmldom/xmldom": { + "version": "0.8.12", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz", + "integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/7zip-bin": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/7zip-bin/-/7zip-bin-5.2.0.tgz", + "integrity": "sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A==", + "dev": true, + "license": "MIT" + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true, + "license": "ISC" + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/agentkeepalive": { + "version": "4.6.0", + 
"resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz", + "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", + "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/app-builder-bin": { + "version": "5.0.0-alpha.10", + "resolved": "https://registry.npmjs.org/app-builder-bin/-/app-builder-bin-5.0.0-alpha.10.tgz", + "integrity": "sha512-Ev4jj3D7Bo+O0GPD2NMvJl+PGiBAfS7pUGawntBNpCbxtpncfUixqFj9z9Jme7V7s3LBGqsWZZP54fxBX3JKJw==", + "dev": true, + "license": "MIT" + }, + "node_modules/app-builder-lib": { + "version": "25.1.8", + "resolved": "https://registry.npmjs.org/app-builder-lib/-/app-builder-lib-25.1.8.tgz", + "integrity": "sha512-pCqe7dfsQFBABC1jeKZXQWhGcCPF3rPCXDdfqVKjIeWBcXzyC1iOWZdfFhGl+S9MyE/k//DFmC6FzuGAUudNDg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@develar/schema-utils": "~2.6.5", + "@electron/notarize": "2.5.0", + "@electron/osx-sign": "1.3.1", + "@electron/rebuild": "3.6.1", + "@electron/universal": "2.0.1", + "@malept/flatpak-bundler": "^0.4.0", + "@types/fs-extra": "9.0.13", + "async-exit-hook": "^2.0.1", + "bluebird-lst": "^1.0.9", + "builder-util": "25.1.7", + "builder-util-runtime": "9.2.10", + "chromium-pickle-js": "^0.2.0", + "config-file-ts": "0.2.8-rc1", + "debug": "^4.3.4", + "dotenv": "^16.4.5", + "dotenv-expand": "^11.0.6", + "ejs": "^3.1.8", + "electron-publish": "25.1.7", + "form-data": "^4.0.0", + "fs-extra": "^10.1.0", + "hosted-git-info": "^4.1.0", + "is-ci": "^3.0.0", + "isbinaryfile": "^5.0.0", + "js-yaml": "^4.1.0", + "json5": "^2.2.3", + "lazy-val": "^1.0.5", + "minimatch": "^10.0.0", + "resedit": "^1.7.0", + "sanitize-filename": "^1.6.3", + "semver": "^7.3.8", + "tar": "^6.1.12", + "temp-file": "^3.4.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + 
"dmg-builder": "25.1.8", + "electron-builder-squirrel-windows": "25.1.8" + } + }, + "node_modules/app-builder-lib/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/app-builder-lib/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/app-builder-lib/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/app-builder-lib/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/aproba": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", + "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", + "dev": true, + "license": "ISC" + }, + "node_modules/are-we-there-yet": 
{ + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": 
"sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-exit-hook": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/async-exit-hook/-/async-exit-hook-2.0.1.tgz", + "integrity": "sha512-NW2cX8m1Q7KPA7a5M2ULQeZ2wR5qI5PAbw5L0UOMxdioVk9PMZ0h1TmyZEkPYrCvYjDlFICusOu1dlEKAAeXBw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/balanced-match": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", + "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + 
"version": "2.10.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.19.tgz", + "integrity": "sha512-qCkNLi2sfBOn8XhZQ0FXsT1Ki/Yo5P90hrkRamVFRS7/KV9hpfA4HkoWNU152+8w0zPjnxo5psx5NL3PSGgv5g==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/bluebird-lst": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/bluebird-lst/-/bluebird-lst-1.0.9.tgz", + "integrity": "sha512-7B1Rtx82hjnSD4PGLAjVWeYH3tHAcVUmChh85a3lltKQm6FresXh9ErQo6oAv6CqxttczC3/kEg8SY5NluPuUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "bluebird": "^3.5.5" + } + }, + "node_modules/boolean": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/boolean/-/boolean-3.2.0.tgz", + "integrity": "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/brace-expansion": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz", + "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/browserslist": { + "version": "4.28.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.2.tgz", + "integrity": "sha512-48xSriZYYg+8qXna9kwqjIVzuQxi+KYWp2+5nCYnYKPTr0LvD89Jqk2Or5ogxz0NUMfIjhh2lIUX/LyX9B4oIg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.10.12", + "caniuse-lite": "^1.0.30001782", + "electron-to-chromium": "^1.5.328", + "node-releases": "^2.0.36", + "update-browserslist-db": "^1.2.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" 
+ } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/builder-util": { + "version": "25.1.7", + "resolved": "https://registry.npmjs.org/builder-util/-/builder-util-25.1.7.tgz", + "integrity": "sha512-7jPjzBwEGRbwNcep0gGNpLXG9P94VA3CPAZQCzxkFXiV2GMQKlziMbY//rXPI7WKfhsvGgFXjTcXdBEwgXw9ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/debug": "^4.1.6", + "7zip-bin": "~5.2.0", + "app-builder-bin": "5.0.0-alpha.10", + "bluebird-lst": "^1.0.9", + "builder-util-runtime": "9.2.10", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "debug": "^4.3.4", + "fs-extra": "^10.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "is-ci": "^3.0.0", + "js-yaml": "^4.1.0", + "source-map-support": "^0.5.19", + "stat-mode": "^1.0.0", + "temp-file": "^3.4.0" + } + }, + "node_modules/builder-util-runtime": { + "version": "9.2.10", + "resolved": "https://registry.npmjs.org/builder-util-runtime/-/builder-util-runtime-9.2.10.tgz", + "integrity": "sha512-6p/gfG1RJSQeIbz8TK5aPNkoztgY1q5TgmGFMAXcY8itsGW6Y2ld1ALsZ5UJn8rog7hKF3zHx5iQbNQ8uLcRlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4", + "sax": "^1.2.4" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/builder-util/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": 
"sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/builder-util/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/builder-util/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cacache": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + 
"promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/cacache/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/cacache/node_modules/brace-expansion": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/cacache/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/cacache/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/cacache/node_modules/minimatch": { + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cacheable-lookup": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", + "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.6.0" + } + }, + "node_modules/cacheable-request": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", + "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", + "dev": true, + "license": "MIT", + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^4.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001788", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001788.tgz", + "integrity": "sha512-6q8HFp+lOQtcf7wBK+uEenxymVWkGKkjFpCvw5W25cmMwEDU45p1xQFBQv8JDlMMry7eNxyBaR+qxgmTUZkIRQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/chromium-pickle-js": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/chromium-pickle-js/-/chromium-pickle-js-0.2.0.tgz", + "integrity": 
"sha512-1R5Fho+jBq0DDydt+/vHWj5KJNJCKdARKOCwZUen84I5BreWoLqRLANH1U87eJy1tiASPtMnGqJJq0ZsLoRPOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/class-variance-authority": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", + "integrity": "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==", + "license": "Apache-2.0", + "dependencies": { + "clsx": "^2.1.1" + }, + "funding": { + "url": "https://polar.sh/cva" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + 
"url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-response": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", + "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-response": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/compare-version": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/compare-version/-/compare-version-0.1.2.tgz", + "integrity": "sha512-pJDh5/4wrEnXX/VWRZvruAGHkzKdr46z11OlTPN+VrATlWWhSKewNCJ1futCO5C7eJB3nPMFZA1LeYtcFboZ2A==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/config-file-ts": { + "version": "0.2.8-rc1", + "resolved": "https://registry.npmjs.org/config-file-ts/-/config-file-ts-0.2.8-rc1.tgz", + "integrity": "sha512-GtNECbVI82bT4RiDIzBSVuTKoSHufnU7Ce7/42bkWZJZFLjmDF2WBpVsvRkhKCfKBnTBb3qZrBwPpFBU/Myvhg==", + "dev": true, + "license": "MIT", + "dependencies": { + "glob": "^10.3.12", + "typescript": "^5.4.3" + } + }, + "node_modules/config-file-ts/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/config-file-ts/node_modules/brace-expansion": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/config-file-ts/node_modules/glob": { + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/config-file-ts/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/config-file-ts/node_modules/minipass": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/crc": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/crc/-/crc-3.8.0.tgz", + "integrity": "sha512-iX3mfgcTMIq3ZKLIsVFAbv7+Mc10kxabAGQb8HvjA1o3T1PIYprbakQ65d3I+2HGHt6nSKkM9PYjgoJO2KcFBQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "buffer": "^5.1.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + 
"peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": 
"sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-node": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", + "license": "MIT" + }, + "node_modules/dir-compare": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/dir-compare/-/dir-compare-4.2.0.tgz", + "integrity": "sha512-2xMCmOoMrdQIPHdsTawECdNPwlVFB9zGcz3kuhmBO6U3oU+UQjsue0i8ayLKpgBcm+hcXPMVSGUN9d+pvJ6+VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimatch": "^3.0.5", + "p-limit": "^3.1.0 " + } + }, + "node_modules/dir-compare/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/dir-compare/node_modules/brace-expansion": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz", + "integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/dir-compare/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + 
}, + "node_modules/dmg-builder": { + "version": "25.1.8", + "resolved": "https://registry.npmjs.org/dmg-builder/-/dmg-builder-25.1.8.tgz", + "integrity": "sha512-NoXo6Liy2heSklTI5OIZbCgXC1RzrDQsZkeEwXhdOro3FT1VBOvbubvscdPnjVuQ4AMwwv61oaH96AbiYg9EnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "app-builder-lib": "25.1.8", + "builder-util": "25.1.7", + "builder-util-runtime": "9.2.10", + "fs-extra": "^10.1.0", + "iconv-lite": "^0.6.2", + "js-yaml": "^4.1.0" + }, + "optionalDependencies": { + "dmg-license": "^1.0.11" + } + }, + "node_modules/dmg-builder/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dmg-builder/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/dmg-builder/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/dmg-license": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/dmg-license/-/dmg-license-1.0.11.tgz", + "integrity": 
"sha512-ZdzmqwKmECOWJpqefloC5OJy1+WZBBse5+MR88z9g9Zn4VY+WYUkAyojmhzJckH5YbbZGcYIuGAkY5/Ys5OM2Q==", + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "dependencies": { + "@types/plist": "^3.0.1", + "@types/verror": "^1.10.3", + "ajv": "^6.10.0", + "crc": "^3.8.0", + "iconv-corefoundation": "^1.1.7", + "plist": "^3.0.4", + "smart-buffer": "^4.0.2", + "verror": "^1.10.0" + }, + "bin": { + "dmg-license": "bin/dmg-license.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dotenv-expand": { + "version": "11.0.7", + "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-11.0.7.tgz", + "integrity": "sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "dotenv": "^16.4.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": 
"sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron": { + "version": "36.9.5", + "resolved": "https://registry.npmjs.org/electron/-/electron-36.9.5.tgz", + "integrity": "sha512-1UCss2IqxqujSzg/2jkRjuiT3G+EEXgd6UKB5kUekwQW1LJ6d4QCr8YItfC3Rr9VIGRDJ29eOERmnRNO1Eh+NA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@electron/get": "^2.0.0", + "@types/node": "^22.7.7", + "extract-zip": "^2.0.1" + }, + "bin": { + "electron": "cli.js" + }, + "engines": { + "node": ">= 12.20.55" + } + }, + "node_modules/electron-builder": { + "version": "25.1.8", + "resolved": "https://registry.npmjs.org/electron-builder/-/electron-builder-25.1.8.tgz", + "integrity": "sha512-poRgAtUHHOnlzZnc9PK4nzG53xh74wj2Jy7jkTrqZ0MWPoHGh1M2+C//hGeYdA+4K8w4yiVCNYoLXF7ySj2Wig==", + "dev": true, + "license": "MIT", + "dependencies": { + "app-builder-lib": "25.1.8", + "builder-util": "25.1.7", + "builder-util-runtime": "9.2.10", + "chalk": "^4.1.2", + "dmg-builder": "25.1.8", + "fs-extra": "^10.1.0", + "is-ci": "^3.0.0", + "lazy-val": "^1.0.5", + "simple-update-notifier": "2.0.0", + "yargs": "^17.6.2" + }, + "bin": { + "electron-builder": "cli.js", + "install-app-deps": "install-app-deps.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/electron-builder/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": 
"sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/electron-builder/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/electron-builder/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/electron-publish": { + "version": "25.1.7", + "resolved": "https://registry.npmjs.org/electron-publish/-/electron-publish-25.1.7.tgz", + "integrity": "sha512-+jbTkR9m39eDBMP4gfbqglDd6UvBC7RLh5Y0MhFSsc6UkGHj9Vj9TWobxevHYMMqmoujL11ZLjfPpMX+Pt6YEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/fs-extra": "^9.0.11", + "builder-util": "25.1.7", + "builder-util-runtime": "9.2.10", + "chalk": "^4.1.2", + "fs-extra": "^10.1.0", + "lazy-val": "^1.0.5", + "mime": "^2.5.2" + } + }, + "node_modules/electron-publish/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + 
"node": ">=12" + } + }, + "node_modules/electron-publish/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/electron-publish/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.340", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.340.tgz", + "integrity": "sha512-908qahOGocRMinT2nM3ajCEM99H4iPdv84eagPP3FfZy/1ZGeOy2CZYzjhms81ckOPCXPlW7LkY4XpxD8r1DrA==", + "dev": true, + "license": "ISC" + }, + "node_modules/electron-updater": { + "version": "6.8.3", + "resolved": "https://registry.npmjs.org/electron-updater/-/electron-updater-6.8.3.tgz", + "integrity": "sha512-Z6sgw3jgbikWKXei1ENdqFOxBP0WlXg3TtKfz0rgw2vIZFJUyI4pD7ZN7jrkm7EoMK+tcm/qTnPUdqfZukBlBQ==", + "license": "MIT", + "dependencies": { + "builder-util-runtime": "9.5.1", + "fs-extra": "^10.1.0", + "js-yaml": "^4.1.0", + "lazy-val": "^1.0.5", + "lodash.escaperegexp": "^4.1.2", + "lodash.isequal": "^4.5.0", + "semver": "~7.7.3", + "tiny-typed-emitter": "^2.1.0" + } + }, + "node_modules/electron-updater/node_modules/builder-util-runtime": { + "version": "9.5.1", + "resolved": "https://registry.npmjs.org/builder-util-runtime/-/builder-util-runtime-9.5.1.tgz", + "integrity": "sha512-qt41tMfgHTllhResqM5DcnHyDIWNgzHvuY2jDcYP9iaGpkWxTUzV6GQjDeLnlR1/DtdlcsWQbA7sByMpmJFTLQ==", + "license": "MIT", + 
"dependencies": { + "debug": "^4.3.4", + "sax": "^1.2.4" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/electron-updater/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/electron-updater/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/electron-updater/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/electron-updater/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/electron-vite": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/electron-vite/-/electron-vite-2.3.0.tgz", + "integrity": "sha512-lsN2FymgJlp4k6MrcsphGqZQ9fKRdJKasoaiwIrAewN1tapYI/KINLdfEL7n10LuF0pPSNf/IqjzZbB5VINctg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.24.7", + 
"@babel/plugin-transform-arrow-functions": "^7.24.7", + "cac": "^6.7.14", + "esbuild": "^0.21.5", + "magic-string": "^0.30.10", + "picocolors": "^1.0.1" + }, + "bin": { + "electron-vite": "bin/electron-vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@swc/core": "^1.0.0", + "vite": "^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + } + } + }, + "node_modules/electron/node_modules/@types/node": { + "version": "22.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.17.tgz", + "integrity": "sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/electron/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": 
"sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + 
"@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/exponential-backoff": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz", + "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/extsprintf": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.4.1.tgz", + 
"integrity": "sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT", + "optional": true + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/filelist": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.6.tgz", + "integrity": "sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/balanced-match": { + 
"version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": 
"^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz", + "integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/global-agent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-agent/-/global-agent-3.0.0.tgz", + "integrity": "sha512-PT6XReJ+D07JvGoxQMkT6qji/jVNfX/h364XHZOWeRzy64sSFr+xJ5OX7LI3b4MPQzdL4H8Y8M0xzPpsVMwA8Q==", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "dependencies": { + "boolean": "^3.0.1", + "es6-error": "^4.1.1", + "matcher": "^3.0.0", + "roarr": "^2.15.3", + "semver": "^7.3.2", + "serialize-error": "^7.0.1" + }, + "engines": { 
+ "node": ">=10.0" + } + }, + "node_modules/global-agent/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "optional": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/got": { + "version": "11.8.6", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", + "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.2", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" + }, + "engines": { + "node": ">=10.19.0" + }, + "funding": { + 
"url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-unicode": 
{ + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/hasown": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.3.tgz", + "integrity": "sha512-ej4AhfhfL2Q2zpMmLo7U1Uv9+PyhIZpgQLGT1F9miIGmiCJIoCgSmczFdrc97mWT4kVY72KA+WnnhJ5pghSvSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", + "dev": true, + 
"license": "BSD-2-Clause" + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/http2-wrapper": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/iconv-corefoundation": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/iconv-corefoundation/-/iconv-corefoundation-1.1.7.tgz", + "integrity": "sha512-T10qvkw0zz4wnm560lOEg0PovVqUXuOFhhHAkixw8/sycy7TJt7v/RrkEKEQnAw2viPSJu6iAkErxnzR0g8PpQ==", + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "dependencies": { + "cli-truncate": "^2.1.0", + 
"node-addon-api": "^1.6.3" + }, + "engines": { + "node": "^8.11.2 || >=10" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true, + "license": "ISC" + }, + "node_modules/inflight": { + "version": "1.0.6", + 
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/ip-address": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": 
"sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isbinaryfile": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-5.0.7.tgz", + "integrity": "sha512-gnWD14Jh3FzS3CPhF0AxNOJ8CxqeblPTADzI38r0wt8ZyQl5edpy75myt08EG2oKvpyiqSqsx+Wkz9vtkbTqYQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 18.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/gjtorikian/" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + 
"@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jake": { + "version": "10.9.4", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.6", + "filelist": "^1.0.4", + "picocolors": "^1.1.1" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": 
"sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/lazy-val": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/lazy-val/-/lazy-val-1.0.5.tgz", + "integrity": 
"sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q==", + "license": "MIT" + }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": 
"https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 
12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lodash": { + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==", + "license": "MIT" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": 
"sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "deprecated": "This package is deprecated. Use require('node:util').isDeepStrictEqual instead.", + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lucide-react": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-1.8.0.tgz", + "integrity": "sha512-WuvlsjngSk7TnTBJ1hsCy3ql9V9VOdcPkd3PKcSmM34vJD8KG6molxz7m7zbYFgICwsanQWmJ13JlYs4Zp7Arw==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/make-fetch-happen/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/make-fetch-happen/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/make-fetch-happen/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": 
"sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/make-fetch-happen/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/matcher": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/matcher/-/matcher-3.0.0.tgz", + "integrity": "sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "escape-string-regexp": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "10.2.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz", + "integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.5" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": 
"sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-collect": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-flush": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.7.tgz", + "integrity": "sha512-TbqTz9cUwWyHS2Dy89P3ocAGUGxKjjLuR9z8w4WUTGAVgEj17/4nhgo2Du56i0Fm3Pm30g4iA8Lcqctc76jCzA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": 
"https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-abi": { + "version": "3.89.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.89.0.tgz", + "integrity": "sha512-6u9UwL0HlAl21+agMN3YAMXcKByMqwGx+pq+P76vii5f7hTPtKDp08/H9py6DY+cfDw7kQNTGEj/rly3IgbNQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-abi/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-addon-api": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-1.7.2.tgz", + "integrity": "sha512-ibPK3iA+vaY1eEjESkQkM0BbCqFOaZMiXRTtdB0u7b4djtY6JnsjvPdUHVMg6xQt3B8fpTTWHI9A+ADjM9frzg==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/node-api-version": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-api-version/-/node-api-version-0.2.1.tgz", + "integrity": 
"sha512-2xP/IGGMmmSQpI1+O/k72jF/ykvZ89JeuKX3TLJAYPDVLUalrshrLHkeVcCCZqG/eEa635cr8IBYzgnDvM2O8Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.3.5" + } + }, + "node_modules/node-api-version/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-gyp": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.4.1.tgz", + "integrity": "sha512-OQkWKbjQKbGkMf/xqI1jjy3oCTgMKJac58G2+bjZb3fza6gW2YrCSdMQYaoTb70crvE//Gngr4f0AgVHmqHvBQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^10.0.3", + "nopt": "^6.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^12.13 || ^14.13 || >=16" + } + }, + "node_modules/node-gyp/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-releases": { + "version": "2.0.37", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.37.tgz", + "integrity": "sha512-1h5gKZCF+pO/o3Iqt5Jp7wc9rH3eJJ0+nh/CIoiRwjRxde/hAHyLPXYN4V3CqKAbiZPSeJFSWHmJsbkicta0Eg==", + "dev": true, + "license": "MIT" + }, + "node_modules/nopt": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", + "integrity": "sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g==", + "dev": true, + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/normalize-url": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + 
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-cancelable": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": 
"sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": 
"sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/path-scurry/node_modules/minipass": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/pe-library": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/pe-library/-/pe-library-0.4.1.tgz", + "integrity": "sha512-eRWB5LBz7PpDu4PUlwT0PhnQfTQJlDDdPa35urV4Osrm0t0AqQFGn+UIkU3klZvwJ8KPO3VbBFsXquA6p6kqZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/jet2jet" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/plist": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/plist/-/plist-3.1.0.tgz", + "integrity": 
"sha512-uysumyrvkUX0rX/dEVqt8gC3sTBzd4zoWfLeS29nb53imdaXVvLINYXTI2GNqzaMuvacNx4uJQ8+b3zXR0pkgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.8.8", + "base64-js": "^1.5.1", + "xmlbuilder": "^15.1.1" + }, + "engines": { + "node": ">=10.4.0" + } + }, + "node_modules/postcss": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.10.tgz", + "integrity": "sha512-pMMHxBOZKFU6HgAZ4eyGnwXF/EvPGGqUr0MnZ5+99485wwW41kW91A4LOGxSHhgugZmSChL5AlElNdwlNgcnLQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, 
+ "node_modules/pump": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/react": { + "version": "19.2.5", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.5.tgz", + "integrity": "sha512-llUJLzz1zTUBrskt2pwZgLq59AemifIftw4aB7JxOqf1HY2FDaGDxgwpAPVzHU1kdWabH7FauP4i1oEeer2WCA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.5", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.5.tgz", + "integrity": "sha512-J5bAZz+DXMMwW/wV3xzKke59Af6CHY7G4uYLN1OvBcKEsWOs4pQExj86BBKamxl/Ik5bx9whOrvBlSDfWzgSag==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.5" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", 
+ "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-remove-scroll": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", + "license": "MIT", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "license": "MIT", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-router": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.14.1.tgz", + "integrity": "sha512-5BCvFskyAAVumqhEKh/iPhLOIkfxcEUz8WqFIARCkMg8hZZzDYX9CtwxXA0e+qT8zAxmMC0x3Ckb9iMONwc5jg==", + "license": "MIT", + "dependencies": { + "cookie": "^1.0.1", + "set-cookie-parser": "^2.6.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/react-router-dom": { + "version": "7.14.1", + "resolved": 
"https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.14.1.tgz", + "integrity": "sha512-ZkrQuwwhGibjQLqH1eCdyiZyLWglPxzxdl5tgwgKEyCSGC76vmAjleGocRe3J/MLfzMUIKwaFJWpFVJhK3d2xA==", + "license": "MIT", + "dependencies": { + "react-router": "7.14.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "license": "MIT", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/read-binary-file-arch": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/read-binary-file-arch/-/read-binary-file-arch-1.0.6.tgz", + "integrity": "sha512-BNg9EN3DD3GsDXX7Aa8O4p92sryjkmzYYgmgTAc6CA4uGLEDzFfxOxugu21akOxpcXHiEgsYkC6nPsQvLLLmEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "bin": { + "read-binary-file-arch": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + 
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resedit": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/resedit/-/resedit-1.7.2.tgz", + "integrity": "sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pe-library": "^0.4.1" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/jet2jet" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": true, + "license": "MIT" + }, + "node_modules/responselike": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", + "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", + "dev": true, + "license": "MIT", + "dependencies": { + "lowercase-keys": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": 
"sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/roarr": { + "version": "2.15.4", + "resolved": "https://registry.npmjs.org/roarr/-/roarr-2.15.4.tgz", + "integrity": "sha512-CHhPh+UNHD2GTXNYhPWLnU8ONHdI+5DI+4EYIAOaiD63rHeYlZvyh8P+in5999TTSFgUYuKUAjzRI4mdh/p+2A==", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "dependencies": { + "boolean": "^3.0.1", + "detect-node": "^2.0.4", + "globalthis": "^1.0.1", + "json-stringify-safe": "^5.0.1", + "semver-compare": "^1.0.0", + "sprintf-js": "^1.1.2" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/rollup": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.1.tgz", + "integrity": "sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.1", + "@rollup/rollup-android-arm64": "4.60.1", + "@rollup/rollup-darwin-arm64": "4.60.1", + "@rollup/rollup-darwin-x64": "4.60.1", + "@rollup/rollup-freebsd-arm64": "4.60.1", + "@rollup/rollup-freebsd-x64": "4.60.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.1", + 
"@rollup/rollup-linux-arm-musleabihf": "4.60.1", + "@rollup/rollup-linux-arm64-gnu": "4.60.1", + "@rollup/rollup-linux-arm64-musl": "4.60.1", + "@rollup/rollup-linux-loong64-gnu": "4.60.1", + "@rollup/rollup-linux-loong64-musl": "4.60.1", + "@rollup/rollup-linux-ppc64-gnu": "4.60.1", + "@rollup/rollup-linux-ppc64-musl": "4.60.1", + "@rollup/rollup-linux-riscv64-gnu": "4.60.1", + "@rollup/rollup-linux-riscv64-musl": "4.60.1", + "@rollup/rollup-linux-s390x-gnu": "4.60.1", + "@rollup/rollup-linux-x64-gnu": "4.60.1", + "@rollup/rollup-linux-x64-musl": "4.60.1", + "@rollup/rollup-openbsd-x64": "4.60.1", + "@rollup/rollup-openharmony-arm64": "4.60.1", + "@rollup/rollup-win32-arm64-msvc": "4.60.1", + "@rollup/rollup-win32-ia32-msvc": "4.60.1", + "@rollup/rollup-win32-x64-gnu": "4.60.1", + "@rollup/rollup-win32-x64-msvc": "4.60.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/sanitize-filename": { + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/sanitize-filename/-/sanitize-filename-1.6.4.tgz", + "integrity": "sha512-9ZyI08PsvdQl2r/bBIGubpVdR3RR9sY6RDiWFPreA21C/EFlQhmgo20UZlNjZMMZNubusLhAQozkA0Od5J21Eg==", + "dev": true, + "license": "WTFPL OR ISC", + 
"dependencies": { + "truncate-utf8-bytes": "^1.0.0" + } + }, + "node_modules/sax": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.6.0.tgz", + "integrity": "sha512-6R3J5M4AcbtLUdZmRv2SygeVaM7IhrLXu9BmnOGmmACak8fiUtOsYNWUS4uK7upbmHIBbLBeFeI//477BKLBzA==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=11.0.0" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/semver-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", + "integrity": "sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/serialize-error": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-7.0.1.tgz", + "integrity": "sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "type-fest": "^0.13.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": 
true, + "license": "ISC" + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + 
"dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz", + "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ip-address": "^10.0.1", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/socks-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": 
"sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true, + "license": "BSD-3-Clause", + "optional": true + }, + "node_modules/ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + 
"node_modules/stat-mode": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stat-mode/-/stat-mode-1.0.0.tgz", + "integrity": "sha512-jH9EhtKIjuXZ2cWxmXS8ZP80XyC3iasQxMDV8jzhNJpfDb7VbQLVW4Wvsxz9QZvzV+G4YoSfBUVKDOyxLzi/sg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": 
"strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sumchecker": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/sumchecker/-/sumchecker-3.0.1.tgz", + "integrity": "sha512-MvjXzkz/BOfyVDkG0oFOtBxHX2u3gKbMHIF/dXblZsgD3BWOFLmHovIpZY7BykJdAjcqRCBi1WYBNdEC9yI7vg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tailwind-merge": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.5.0.tgz", + "integrity": "sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.2.tgz", + "integrity": "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", 
+ "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "deprecated": "Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/temp-file": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/temp-file/-/temp-file-3.4.0.tgz", + "integrity": "sha512-C5tjlC/HCtVUOi3KWVokd4vHVViOmGjtLwIh4MuzPo/nMYTV/p1urt3RnMz2IWXDdKEGJH3k5+KPxtqRsUYGtg==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-exit-hook": "^2.0.1", + "fs-extra": "^10.0.0" + } + }, + "node_modules/temp-file/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/temp-file/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/temp-file/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/tiny-typed-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tiny-typed-emitter/-/tiny-typed-emitter-2.1.0.tgz", + "integrity": "sha512-qVtvMxeXbVej0cQWKqVSSAHmKZEHAvxdF8HEUBFWts8h+xEo5m/lEiPakuyZ3BnCBjOD8i24kzNOiOLLgsSxhA==", + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz", + "integrity": "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.4" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + 
"integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tmp": "^0.2.0" + } + }, + "node_modules/truncate-utf8-bytes": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", + "integrity": "sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ==", + "dev": true, + "license": "WTFPL", + "dependencies": { + "utf8-byte-length": "^1.0.1" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-fest": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", + "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "optional": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { 
+ "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", + "dev": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^3.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + 
"dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "license": "MIT", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/utf8-byte-length": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.5.tgz", + "integrity": "sha512-Xn0w3MtiQ6zoz2vFyUVruaCL53O/DwUvkEeOvj+uulMm0BkUGYWmBYVyElqZaSLhY6ZD0ulfU3aBra2aVT4xfA==", + "dev": true, + "license": "(WTFPL OR MIT)" + }, + 
"node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/verror": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.1.tgz", + "integrity": "sha512-veufcmxri4e3XSrT0xwfUR7kguIkaxBeosDg00yDWhk49wdwkSUrvvsm7nc75e1PUyvIeZj6nS8VQRYz2/S4Xg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/vite": { + "version": "6.4.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.2.tgz", + "integrity": "sha512-2N/55r4JDJ4gdrCvGgINMy+HH3iRpNIz8K6SFwVsA+JbQScLiC+clmAxBgwiSPgcG9U15QmvqCGWzMbqda5zGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + 
"optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", + "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", + "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", + "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", + "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", + "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", + "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", + "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", + "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", + "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", + "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", + "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", + "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", + "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", + "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", + "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", + "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", + "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", + "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", + "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", + "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", + "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", + "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", + "integrity": 
"sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", + "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.12", + "@esbuild/android-arm": "0.25.12", + "@esbuild/android-arm64": "0.25.12", + "@esbuild/android-x64": "0.25.12", + "@esbuild/darwin-arm64": "0.25.12", + "@esbuild/darwin-x64": "0.25.12", + "@esbuild/freebsd-arm64": "0.25.12", + "@esbuild/freebsd-x64": "0.25.12", + "@esbuild/linux-arm": "0.25.12", + "@esbuild/linux-arm64": "0.25.12", + "@esbuild/linux-ia32": "0.25.12", + "@esbuild/linux-loong64": "0.25.12", + "@esbuild/linux-mips64el": "0.25.12", + "@esbuild/linux-ppc64": "0.25.12", + "@esbuild/linux-riscv64": "0.25.12", + "@esbuild/linux-s390x": "0.25.12", + "@esbuild/linux-x64": "0.25.12", + "@esbuild/netbsd-arm64": "0.25.12", + "@esbuild/netbsd-x64": "0.25.12", + "@esbuild/openbsd-arm64": "0.25.12", + "@esbuild/openbsd-x64": "0.25.12", + "@esbuild/openharmony-arm64": "0.25.12", + "@esbuild/sunos-x64": "0.25.12", + "@esbuild/win32-arm64": "0.25.12", + "@esbuild/win32-ia32": "0.25.12", + "@esbuild/win32-x64": "0.25.12" + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", + "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/which": { 
+ "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + 
"license": "ISC" + }, + "node_modules/xmlbuilder": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz", + "integrity": "sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": 
"sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/desktop/package.json b/desktop/package.json new file mode 100644 index 0000000..ae07e75 --- /dev/null +++ b/desktop/package.json @@ -0,0 +1,58 @@ +{ + "name": "flowkit-desktop", + "version": "0.2.0", + "description": "FlowKit Desktop — AI Video Generation", + "main": "out/main/main.js", + "private": true, + "scripts": { + "dev": "electron-vite dev", + "build": "electron-vite build", + "prepare:upscale-runtime": "node ./scripts/prepare-upscale-runtime.mjs", + "prepare:upscale-runtime:all": "node ./scripts/prepare-upscale-runtime.mjs --platform=all", + "dist": "npm run prepare:upscale-runtime && npm run build && electron-builder", + "dist:mac": "npm run prepare:upscale-runtime && npm run build && electron-builder --mac", + "dist:win": "node ./scripts/preflight-win-build.mjs && npm run prepare:upscale-runtime -- --platform=win32 && npm run build && electron-builder --win", + "build:win": "npm run dist:win", + "build:mac": "npm run dist:mac", + "preview": "electron-vite preview" + }, + "dependencies": { + "@fontsource/inter": "^5.2.8", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": "^2.1.16", + "@radix-ui/react-label": "^2.1.8", + "@radix-ui/react-popover": "^1.1.15", + "@radix-ui/react-progress": "^1.1.8", + "@radix-ui/react-scroll-area": "^1.2.10", + "@radix-ui/react-select": "^2.2.6", + "@radix-ui/react-separator": 
"^1.1.8", + "@radix-ui/react-slot": "^1.2.4", + "@radix-ui/react-switch": "^1.2.6", + "@radix-ui/react-tabs": "^1.1.13", + "@radix-ui/react-toast": "^1.2.15", + "@radix-ui/react-tooltip": "^1.2.8", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "electron-updater": "^6.3.4", + "lucide-react": "^1.7.0", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-router-dom": "^7.14.0", + "tailwind-merge": "^3.5.0" + }, + "devDependencies": { + "@electron-toolkit/preload": "^3.0.1", + "@electron-toolkit/utils": "^3.0.0", + "@tailwindcss/vite": "^4.2.2", + "@types/node": "^24.12.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^4.3.4", + "electron": "^36.1.0", + "electron-builder": "^25.1.8", + "electron-vite": "^2.3.0", + "tailwindcss": "^4.2.2", + "typescript": "~5.9.3", + "vite": "^6.3.3" + } +} diff --git a/desktop/resources/ffmpeg/.gitkeep b/desktop/resources/ffmpeg/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/desktop/resources/icon.ico b/desktop/resources/icon.ico new file mode 100644 index 0000000..e5a036e Binary files /dev/null and b/desktop/resources/icon.ico differ diff --git a/desktop/resources/icon.png b/desktop/resources/icon.png new file mode 100644 index 0000000..3613d14 Binary files /dev/null and b/desktop/resources/icon.png differ diff --git a/desktop/scripts/preflight-win-build.mjs b/desktop/scripts/preflight-win-build.mjs new file mode 100644 index 0000000..df9d77e --- /dev/null +++ b/desktop/scripts/preflight-win-build.mjs @@ -0,0 +1,14 @@ +#!/usr/bin/env node + +const allowCross = process.env.FLOWKIT_ALLOW_CROSS_WIN === "1"; + +if (process.platform !== "win32" && !allowCross) { + console.error( + "[dist:win] Build Windows trên non-Windows bị chặn để tránh package lỗi (agent binary sai định dạng).", + ); + console.error( + "[dist:win] Hãy build trên Windows runner (GitHub Actions build-windows) hoặc máy Windows local.", + ); + process.exit(1); +} + diff --git 
a/desktop/scripts/prepare-upscale-runtime.mjs b/desktop/scripts/prepare-upscale-runtime.mjs new file mode 100644 index 0000000..9eeeab1 --- /dev/null +++ b/desktop/scripts/prepare-upscale-runtime.mjs @@ -0,0 +1,518 @@ +#!/usr/bin/env node + +import { spawnSync } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { createWriteStream, existsSync } from "node:fs"; +import { access, chmod, copyFile, cp, mkdir, readdir, readFile, rm, writeFile } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { dirname, join, resolve } from "node:path"; +import { pipeline } from "node:stream/promises"; +import { fileURLToPath } from "node:url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const desktopRoot = resolve(__dirname, ".."); +const runtimeBaseRoot = join(desktopRoot, "resources", "agent", "third_party"); +const projectRuntimeBaseRoot = resolve(desktopRoot, "..", "third_party"); +const runtimeIndexManifestPath = join(runtimeBaseRoot, "runtime-manifest.json"); + +const args = process.argv.slice(2); +const force = args.includes("--force") || process.env.FLOWKIT_UPSCALE_FORCE === "1"; +const downloadTimeoutMs = Math.max( + 60_000, + Number.parseInt(process.env.FLOWKIT_DOWNLOAD_TIMEOUT_MS || "", 10) || 20 * 60 * 1000, +); +const explicitPlatformArg = args.find((arg) => arg.startsWith("--platform=")); +const explicitPlatform = explicitPlatformArg ? 
explicitPlatformArg.slice("--platform=".length).trim() : ""; + +const RUNTIME_SPECS = { + win32: { + ffmpegZipUrls: [ + "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip", + "https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip", + ], + realesrganZipUrls: [ + "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.3.0/realesrgan-ncnn-windows.zip", + "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.5.0/realesrgan-ncnn-vulkan-20220424-windows.zip", + ], + ffmpegBin: "ffmpeg.exe", + ffprobeBin: "ffprobe.exe", + realesrganBin: "realesrgan-ncnn-vulkan.exe", + }, + darwin: { + ffmpegZipUrls: ["https://evermeet.cx/ffmpeg/getrelease/ffmpeg/zip"], + ffprobeZipUrls: ["https://evermeet.cx/ffmpeg/getrelease/ffprobe/zip"], + realesrganZipUrls: [ + "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.3.0/realesrgan-ncnn-macos.zip", + "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.5.0/realesrgan-ncnn-vulkan-20220424-macos.zip", + ], + ffmpegBin: "ffmpeg", + ffprobeBin: "ffprobe", + realesrganBin: "realesrgan-ncnn-vulkan", + }, +}; + +function normalizePlatform(raw) { + const value = String(raw || "").trim().toLowerCase(); + if (!value) return ""; + if (["win", "win32", "windows", "win64"].includes(value)) return "win32"; + if (["darwin", "mac", "macos", "osx", "os-x"].includes(value)) return "darwin"; + return value; +} + +function resolveTargetPlatforms(raw) { + const supported = Object.keys(RUNTIME_SPECS); + const fallback = normalizePlatform(process.platform); + const text = String(raw || "").trim().toLowerCase(); + if (!text) return [fallback]; + if (text === "all") return supported; + const tokens = text + .split(",") + .map((item) => normalizePlatform(item)) + .filter(Boolean); + const deduped = Array.from(new Set(tokens)); + if (!deduped.length) return [fallback]; + const unsupported = deduped.filter((platform) => !supported.includes(platform)); + if (unsupported.length) 
{ + throw new Error( + `Unsupported platform(s): ${unsupported.join(", ")}. Supported: ${supported.join(", ")} or "all".`, + ); + } + return deduped; +} + +const targetPlatforms = resolveTargetPlatforms( + explicitPlatform || process.env.FLOWKIT_UPSCALE_PLATFORM || process.platform, +); + +function log(message) { + process.stdout.write(`[prepare-upscale-runtime] ${message}\n`); +} + +async function ensureDir(path) { + await mkdir(path, { recursive: true }); +} + +async function pathExists(path) { + try { + await access(path); + return true; + } catch { + return false; + } +} + +async function hasRuntimeAtRoot(root, spec) { + const ffmpegTarget = join(root, "ffmpeg", spec.ffmpegBin); + const ffprobeTarget = join(root, "ffmpeg", spec.ffprobeBin); + const realesrganTarget = join(root, "realesrgan", spec.realesrganBin); + const modelParamTarget = join(root, "realesrgan", "models", "realesrgan-x4plus.param"); + const modelBinTarget = join(root, "realesrgan", "models", "realesrgan-x4plus.bin"); + return ( + (await pathExists(ffmpegTarget)) && + (await pathExists(ffprobeTarget)) && + (await pathExists(realesrganTarget)) && + (await pathExists(modelParamTarget)) && + (await pathExists(modelBinTarget)) + ); +} + +async function downloadFile(url, destPath) { + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), downloadTimeoutMs); + try { + const response = await fetch(url, { redirect: "follow", signal: controller.signal }); + if (!response.ok || !response.body) { + throw new Error(`Download failed (${response.status}) from ${url}`); + } + const out = createWriteStream(destPath); + await pipeline(response.body, out); + } finally { + clearTimeout(timer); + } +} + +async function downloadFirstAvailable(urls, destPath, label) { + if (!urls?.length) throw new Error(`No download URLs configured for ${label}`); + let lastError = ""; + for (const url of urls) { + try { + log(`Downloading ${label} from ${url}`); + await downloadFile(url, 
destPath); + return url; + } catch (error) { + lastError = error instanceof Error ? error.message : String(error); + log(`Download failed for ${label} (${url}): ${lastError}`); + } + } + throw new Error(`All mirrors failed for ${label}: ${lastError}`); +} + +function runCommand(command, commandArgs) { + const result = spawnSync(command, commandArgs, { stdio: "inherit" }); + if (result.status !== 0) { + throw new Error(`Command failed: ${command} ${commandArgs.join(" ")}`); + } +} + +function extractZip(zipPath, outputDir) { + if (process.platform === "win32") { + const escapedZip = zipPath.replace(/'/g, "''"); + const escapedOut = outputDir.replace(/'/g, "''"); + runCommand("powershell.exe", [ + "-NoLogo", + "-NoProfile", + "-Command", + `Expand-Archive -Path '${escapedZip}' -DestinationPath '${escapedOut}' -Force`, + ]); + return; + } + runCommand("unzip", ["-o", zipPath, "-d", outputDir]); +} + +async function findFile(rootDir, fileName) { + const stack = [rootDir]; + while (stack.length) { + const current = stack.pop(); + const entries = await readdir(current, { withFileTypes: true }); + for (const entry of entries) { + const full = join(current, entry.name); + if (entry.isDirectory()) { + stack.push(full); + continue; + } + if (entry.isFile() && entry.name.toLowerCase() === fileName.toLowerCase()) { + return full; + } + } + } + return null; +} + +async function findModels(rootDir) { + const found = []; + const stack = [rootDir]; + while (stack.length) { + const current = stack.pop(); + const entries = await readdir(current, { withFileTypes: true }); + for (const entry of entries) { + const full = join(current, entry.name); + if (entry.isDirectory()) { + stack.push(full); + continue; + } + if (!entry.isFile()) continue; + if (!entry.name.endsWith(".param") && !entry.name.endsWith(".bin")) continue; + found.push(full); + } + } + return found; +} + +async function findDynamicLibs(rootDir) { + const found = []; + const stack = [rootDir]; + while (stack.length) { + 
const current = stack.pop(); + const entries = await readdir(current, { withFileTypes: true }); + for (const entry of entries) { + const full = join(current, entry.name); + if (entry.isDirectory()) { + stack.push(full); + continue; + } + if (!entry.isFile()) continue; + const lower = entry.name.toLowerCase(); + if ( + lower.endsWith(".dylib") || + lower.endsWith(".dll") || + lower.endsWith(".so") || + lower.includes(".so.") + ) { + found.push(full); + } + } + } + return found; +} + +async function copyExecutable(src, dest, platform) { + await copyFile(src, dest); + if (platform !== "win32") { + await chmod(dest, 0o755); + } +} + +function getRuntimePathsForPlatform(platform) { + const runtimeRoot = join(runtimeBaseRoot, platform); + const ffmpegRoot = join(runtimeRoot, "ffmpeg"); + const realesrganRoot = join(runtimeRoot, "realesrgan"); + const realesrganModelsRoot = join(realesrganRoot, "models"); + const manifestPath = join(runtimeRoot, "runtime-manifest.json"); + return { + runtimeRoot, + ffmpegRoot, + realesrganRoot, + realesrganModelsRoot, + manifestPath, + }; +} + +async function readManifest(path) { + try { + const raw = await readFile(path, "utf-8"); + return JSON.parse(raw); + } catch { + return null; + } +} + +async function writeManifest(path, manifest) { + await ensureDir(dirname(path)); + await writeFile(path, JSON.stringify(manifest, null, 2), "utf-8"); +} + +async function mirrorRuntimeToProjectRoot(runtimeRoot, targetPlatform) { + const platformRoot = join(projectRuntimeBaseRoot, targetPlatform); + await ensureDir(projectRuntimeBaseRoot); + await rm(platformRoot, { recursive: true, force: true }); + await cp(runtimeRoot, platformRoot, { recursive: true, force: true }); + log(`Mirrored runtime (${targetPlatform}) to ${platformRoot}`); +} + +async function cleanupLegacyFlatRuntime(baseRoot, targetPlatform, spec) { + const platformRoot = join(baseRoot, targetPlatform); + if (!(await hasRuntimeAtRoot(platformRoot, spec))) { + return; + } + const 
legacyFfmpeg = join(baseRoot, "ffmpeg"); + const legacyRealesrgan = join(baseRoot, "realesrgan"); + if (await pathExists(legacyFfmpeg)) { + await rm(legacyFfmpeg, { recursive: true, force: true }); + } + if (await pathExists(legacyRealesrgan)) { + await rm(legacyRealesrgan, { recursive: true, force: true }); + } +} + +async function writeRuntimeIndexManifest(items) { + const payload = { + preparedAt: new Date().toISOString(), + hostPlatform: normalizePlatform(process.platform), + targets: items, + }; + await writeManifest(runtimeIndexManifestPath, payload); +} + +async function prepareRuntimeForPlatform(targetPlatform) { + const spec = RUNTIME_SPECS[targetPlatform]; + if (!spec) { + log(`Skip: unsupported platform "${targetPlatform}". Supported: ${Object.keys(RUNTIME_SPECS).join(", ")}`); + return null; + } + + const { runtimeRoot, ffmpegRoot, realesrganRoot, realesrganModelsRoot, manifestPath } = + getRuntimePathsForPlatform(targetPlatform); + + await ensureDir(ffmpegRoot); + await ensureDir(realesrganModelsRoot); + + const ffmpegTarget = join(ffmpegRoot, spec.ffmpegBin); + const ffprobeTarget = join(ffmpegRoot, spec.ffprobeBin); + const realesrganTarget = join(realesrganRoot, spec.realesrganBin); + const modelParamTarget = join(realesrganModelsRoot, "realesrgan-x4plus.param"); + const modelBinTarget = join(realesrganModelsRoot, "realesrgan-x4plus.bin"); + + let existingReady = + (await pathExists(ffmpegTarget)) && + (await pathExists(ffprobeTarget)) && + (await pathExists(realesrganTarget)) && + (await pathExists(modelParamTarget)) && + (await pathExists(modelBinTarget)); + + // One-time migration path: old runtime layout stored binaries directly at + // third_party/{ffmpeg,realesrgan}. Migrate to third_party//... 
+ if (!existingReady) { + const legacyReady = await hasRuntimeAtRoot(runtimeBaseRoot, spec); + if (legacyReady) { + log(`Migrating legacy runtime layout to platform folder (${targetPlatform})...`); + await cp(join(runtimeBaseRoot, "ffmpeg"), ffmpegRoot, { recursive: true, force: true }); + await cp(join(runtimeBaseRoot, "realesrgan"), realesrganRoot, { recursive: true, force: true }); + existingReady = await hasRuntimeAtRoot(runtimeRoot, spec); + if (existingReady) { + log(`Legacy runtime migrated to ${runtimeRoot}`); + } + } + } + + const manifest = await readManifest(manifestPath); + const specSignature = JSON.stringify({ + targetPlatform, + ffmpegZipUrls: spec.ffmpegZipUrls, + ffprobeZipUrls: spec.ffprobeZipUrls || null, + realesrganZipUrls: spec.realesrganZipUrls, + }); + const manifestMatches = manifest?.specSignature === specSignature; + + if (!force && existingReady && (manifestMatches || !manifest)) { + if (!manifestMatches) { + await writeManifest(manifestPath, { + preparedAt: new Date().toISOString(), + targetPlatform, + specSignature, + runtimeRoot, + ffmpegBin: join("ffmpeg", spec.ffmpegBin), + ffprobeBin: join("ffmpeg", spec.ffprobeBin), + realesrganBin: join("realesrgan", spec.realesrganBin), + modelDir: join("realesrgan", "models"), + }); + } + const mirroredProbe = join(projectRuntimeBaseRoot, targetPlatform, "realesrgan", "models", "realesrgan-x4plus.param"); + if (!(await pathExists(mirroredProbe))) { + await mirrorRuntimeToProjectRoot(runtimeRoot, targetPlatform); + } + await cleanupLegacyFlatRuntime(runtimeBaseRoot, targetPlatform, spec); + await cleanupLegacyFlatRuntime(projectRuntimeBaseRoot, targetPlatform, spec); + log(`Runtime already prepared for ${targetPlatform}. Use --force to refresh.`); + return { + platform: targetPlatform, + ffmpegBin: ffmpegTarget, + ffprobeBin: ffprobeTarget, + realesrganBin: realesrganTarget, + modelDir: realesrganModelsRoot, + status: manifestMatches ? 
"cached" : "migrated_cached", + }; + } + + const scratchDir = join(tmpdir(), `flowkit-upscale-${randomUUID()}`); + await ensureDir(scratchDir); + + try { + log(`Preparing runtime for ${targetPlatform}...`); + + const ffmpegZip = join(scratchDir, "ffmpeg.zip"); + await downloadFirstAvailable(spec.ffmpegZipUrls, ffmpegZip, "ffmpeg bundle"); + + const ffmpegExtractDir = join(scratchDir, "ffmpeg"); + await ensureDir(ffmpegExtractDir); + extractZip(ffmpegZip, ffmpegExtractDir); + + const ffmpegSrc = await findFile(ffmpegExtractDir, spec.ffmpegBin); + if (!ffmpegSrc) { + throw new Error("Could not locate ffmpeg binary in downloaded archive."); + } + let ffprobeSrc = await findFile(ffmpegExtractDir, spec.ffprobeBin); + await copyExecutable(ffmpegSrc, ffmpegTarget, targetPlatform); + if (ffprobeSrc) { + await copyExecutable(ffprobeSrc, ffprobeTarget, targetPlatform); + log(`Bundled ${spec.ffmpegBin} + ${spec.ffprobeBin}`); + } else { + log(`${spec.ffprobeBin} not found in ffmpeg bundle, fallback to dedicated ffprobe package...`); + } + + if (spec.ffprobeZipUrls?.length) { + const ffprobeZip = join(scratchDir, "ffprobe.zip"); + await downloadFirstAvailable(spec.ffprobeZipUrls, ffprobeZip, "ffprobe bundle"); + const ffprobeExtractDir = join(scratchDir, "ffprobe"); + await ensureDir(ffprobeExtractDir); + extractZip(ffprobeZip, ffprobeExtractDir); + const standaloneProbe = await findFile(ffprobeExtractDir, spec.ffprobeBin); + if (!standaloneProbe) { + throw new Error(`Could not locate ${spec.ffprobeBin} in dedicated ffprobe package.`); + } + await copyExecutable(standaloneProbe, ffprobeTarget, targetPlatform); + ffprobeSrc = standaloneProbe; + log(`Bundled ${spec.ffprobeBin} from dedicated package`); + } + + if (!(await pathExists(ffprobeTarget))) { + throw new Error(`${spec.ffprobeBin} is missing after extraction.`); + } + + const realesrganZip = join(scratchDir, "realesrgan.zip"); + await downloadFirstAvailable(spec.realesrganZipUrls, realesrganZip, "Real-ESRGAN bundle"); + 
+ const realesrganExtractDir = join(scratchDir, "realesrgan"); + await ensureDir(realesrganExtractDir); + extractZip(realesrganZip, realesrganExtractDir); + + const realesrganSrc = await findFile(realesrganExtractDir, spec.realesrganBin); + if (!realesrganSrc) { + throw new Error("Could not locate realesrgan binary in downloaded archive."); + } + await copyExecutable(realesrganSrc, realesrganTarget, targetPlatform); + + const dynamicLibs = await findDynamicLibs(realesrganExtractDir); + for (const libPath of dynamicLibs) { + const libName = libPath.split(/[\\/]/).pop(); + if (!libName) continue; + const libTarget = join(realesrganRoot, libName); + await copyFile(libPath, libTarget); + if (targetPlatform !== "win32") await chmod(libTarget, 0o755); + } + if (dynamicLibs.length > 0) { + log(`Bundled ${dynamicLibs.length} runtime library file(s) for Real-ESRGAN`); + } + + const models = await findModels(realesrganExtractDir); + if (!models.length) { + throw new Error("Could not locate Real-ESRGAN model files (.param/.bin)."); + } + + for (const modelPath of models) { + const target = join(realesrganModelsRoot, modelPath.split(/[\\/]/).pop()); + await copyFile(modelPath, target); + } + + if (!(await pathExists(modelParamTarget)) || !(await pathExists(modelBinTarget))) { + throw new Error("Required model realesrgan-x4plus.[param|bin] not found after extraction."); + } + + await writeManifest(manifestPath, { + preparedAt: new Date().toISOString(), + targetPlatform, + specSignature, + runtimeRoot, + ffmpegBin: join("ffmpeg", spec.ffmpegBin), + ffprobeBin: join("ffmpeg", spec.ffprobeBin), + realesrganBin: join("realesrgan", spec.realesrganBin), + modelDir: join("realesrgan", "models"), + }); + + await mirrorRuntimeToProjectRoot(runtimeRoot, targetPlatform); + await cleanupLegacyFlatRuntime(runtimeBaseRoot, targetPlatform, spec); + await cleanupLegacyFlatRuntime(projectRuntimeBaseRoot, targetPlatform, spec); + + log(`Runtime prepared successfully for ${targetPlatform}.`); + 
return { + platform: targetPlatform, + ffmpegBin: ffmpegTarget, + ffprobeBin: ffprobeTarget, + realesrganBin: realesrganTarget, + modelDir: realesrganModelsRoot, + status: "prepared", + }; + } finally { + if (existsSync(scratchDir)) { + await rm(scratchDir, { recursive: true, force: true }); + } + } +} + +async function main() { + log(`Target runtime platform(s): ${targetPlatforms.join(", ")}`); + const results = []; + for (const platform of targetPlatforms) { + const prepared = await prepareRuntimeForPlatform(platform); + if (prepared) { + results.push(prepared); + } + } + await writeRuntimeIndexManifest(results); +} + +main().catch((error) => { + console.error(`[prepare-upscale-runtime] ERROR: ${error instanceof Error ? error.message : String(error)}`); + process.exitCode = 1; +}); diff --git a/desktop/src/App.tsx b/desktop/src/App.tsx new file mode 100644 index 0000000..3eaf63c --- /dev/null +++ b/desktop/src/App.tsx @@ -0,0 +1,554 @@ +import { BrowserRouter, NavLink, Routes, Route, useLocation } from 'react-router-dom' +import { + LayoutDashboard, + FolderOpen, + ScrollText, + Film, + Globe, + Settings, + ImagePlus, + Clapperboard, + ShieldAlert, + Copy, + RefreshCw, + Eye, + EyeOff, + Minus, + Square, + X, +} from 'lucide-react' +import { useState, useEffect, useCallback } from 'react' +import { useWebSocket } from './api/useWebSocket' +import { useExtensionStatus } from './api/useExtensionStatus' +import { fetchAPI } from './api/client' +import { Badge } from './components/ui/badge' +import { Button } from './components/ui/button' +import { Toaster } from './components/ui/toaster' +import DashboardPage from './pages/DashboardPage' +import ProjectsPage from './pages/ProjectsPage' +import LogsPage from './pages/LogsPage' +import GalleryPage from './pages/GalleryPage' +import SettingsPage from './pages/SettingsPage' +import ManualImagesPage from './pages/ManualImagesPage' +import ManualVideosPage from './pages/ManualVideosPage' + +type LicenseStatus = 'ACTIVE' | 
'EXPIRED' | 'REVOKED' | 'PENDING' | 'ERROR' + +interface LicenseCheckResult { + allowed: boolean + status: LicenseStatus + machineId: string + machineHash: string | null + planCode: string | null + planLabel: string | null + activatedAt: string | null + expiresAt: string | null + revokedReason: string | null + checkedAt: string + serverTime: string | null + source: 'remote' | 'cache' + apiBaseUrl: string + message: string +} + +const NAV = [ + { to: '/', icon: LayoutDashboard, label: 'Tổng quan', exact: true }, + { to: '/projects', icon: FolderOpen, label: 'Dự án', exact: false }, + { to: '/manual-images', icon: ImagePlus, label: 'Tạo ảnh', exact: false }, + { to: '/manual-videos', icon: Clapperboard, label: 'Tạo video', exact: false }, + { to: '/logs', icon: ScrollText, label: 'Nhật ký', exact: false }, + { to: '/gallery', icon: Film, label: 'Thư viện', exact: false }, + { to: '/settings', icon: Settings, label: 'Cài đặt', exact: false }, +] + +const LICENSE_STATUS_LABEL: Record = { + ACTIVE: 'Đã kích hoạt', + EXPIRED: 'Đã hết hạn', + REVOKED: 'Đã thu hồi', + PENDING: 'Chưa kích hoạt', + ERROR: 'Lỗi kết nối', +} + +function formatDateTime(value: string | null): string { + if (!value) return '—' + const date = new Date(value) + if (Number.isNaN(date.getTime())) return value + return date.toLocaleString('vi-VN') +} + +function statusVariant(status: LicenseStatus): 'success' | 'destructive' | 'secondary' { + if (status === 'ACTIVE') return 'success' + if (status === 'ERROR') return 'secondary' + return 'destructive' +} + +function PageTitle() { + const loc = useLocation() + const match = NAV.find(n => n.exact ? loc.pathname === n.to : loc.pathname.startsWith(n.to)) + return {match?.label ?? 
'Tổng quan'} +} + +function AgentStatusBadge() { + const [status, setStatus] = useState('Đang khởi động...') + + useEffect(() => { + if (window.electron?.onAgentStatus) { + const unsub = window.electron.onAgentStatus(setStatus) + return unsub + } + }, []) + + const variant = status === 'Ready' + ? 'success' + : status.startsWith('Error') + ? 'destructive' + : 'secondary' + + return ( + + Agent · {status === 'Ready' ? 'Sẵn sàng' : status} + + ) +} + +function AppWindowHeader() { + const platform = window.electron?.platform + const isMac = platform === 'darwin' + const [isMaximized, setIsMaximized] = useState(false) + + const refreshMaximized = useCallback(() => { + window.electron?.isWindowMaximized?.() + .then((maximized) => setIsMaximized(Boolean(maximized))) + .catch(() => { }) + }, []) + + useEffect(() => { + refreshMaximized() + window.addEventListener('resize', refreshMaximized) + return () => window.removeEventListener('resize', refreshMaximized) + }, [refreshMaximized]) + + const minimize = async () => { + await window.electron?.windowMinimize?.() + } + + const toggleMaximize = async () => { + await window.electron?.windowToggleMaximize?.() + refreshMaximized() + } + + const closeWindow = async () => { + await window.electron?.windowClose?.() + } + + return ( +
+
FlowKit
+ {!isMac && ( +
+ + + +
+ )} +
+ ) +} + +function Layout() { + const { isConnected } = useWebSocket() + const { connected: extensionConnected } = useExtensionStatus() + const [appVersion, setAppVersion] = useState('0.2.0') + const [flowPanelVisible, setFlowPanelVisible] = useState(true) + const location = useLocation() + + const openFlowTab = () => window.electron?.openFlowTab({ focus: true, reveal: true }) + const toggleFlowPanel = async () => { + try { + const next = await window.electron?.toggleFlowPanel?.() + if (next) setFlowPanelVisible(Boolean(next.visible)) + } catch { + // no-op + } + } + + useEffect(() => { + window.electron?.getAppInfo?.() + .then((info) => { + if (info?.version) setAppVersion(info.version) + }) + .catch(() => { }) + }, []) + + useEffect(() => { + let mounted = true + window.electron?.getFlowPanelState?.() + .then((state) => { + if (!mounted || !state) return + setFlowPanelVisible(Boolean(state.visible)) + }) + .catch(() => { }) + const unsub = window.electron?.onFlowPanelStateChanged?.((state) => { + setFlowPanelVisible(Boolean(state?.visible)) + }) + return () => { + mounted = false + unsub?.() + } + }, []) + + useEffect(() => { + const match = location.pathname.match(/^\/projects\/([0-9a-f-]{36})$/i) + if (!match) return + const projectId = match[1] + fetchAPI('/api/active-project', { + method: 'PUT', + body: JSON.stringify({ project_id: projectId }), + }).catch(() => { }) + }, [location.pathname]) + + return ( +
+ + +
+
+
+
+
+ Trung tâm điều khiển +
+ + + +
+
+ + {extensionConnected ? 'Extension kết nối' : 'Extension mất kết nối'} + + + {flowPanelVisible ? 'Panel: Hiện' : 'Panel: Ẩn'} + + + {isConnected ? 'Realtime ổn' : 'Realtime lỗi'} + + +
+
+
+ +
+
+ + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + +
+
+
+ + +
+ ) +} + +function LicenseGate({ + machineId, + license, + onCopyMachineId, + onRefresh, + checking, + feedback, +}: { + machineId: string + license: LicenseCheckResult | null + onCopyMachineId: () => void + onRefresh: () => void + checking: boolean + feedback: string +}) { + const isRevoked = license?.status === 'REVOKED' + const isExpired = license?.status === 'EXPIRED' + + return ( +
+
+
+
+ +
+
+

Kích hoạt bản quyền FlowKit

+

+ Ứng dụng chỉ hoạt động khi thiết bị đã được active trong CMS theo Machine ID. +

+
+
+ +
+
+
Machine ID
+
+ {machineId || '—'} + +
+
+
+ +
+
+ Trạng thái: + + {license ? LICENSE_STATUS_LABEL[license.status] : 'Chưa kiểm tra'} + + {license?.planLabel && Gói: {license.planLabel}} + {license?.source === 'cache' && Bản lưu ngoại tuyến} +
+
+ {license?.message || 'Bấm kiểm tra lại sau khi admin active máy trong CMS.'} +
+
+ Kích hoạt: {formatDateTime(license?.activatedAt ?? null)} · Hết hạn: {formatDateTime(license?.expiresAt ?? null)} +
+
+ Lần kiểm tra gần nhất: {formatDateTime(license?.checkedAt ?? null)} +
+
+ + {isRevoked && ( +
+
Thiết bị đang ở trạng thái REVOKED
+
Lý do: {license?.revokedReason || 'Không có lý do cụ thể từ quản trị viên.'}
+
Machine ID: {machineId}
+
Hướng xử lý: admin cần Active lại license cho đúng Machine ID này trên CMS.
+
+ )} + + {isExpired && ( +
+ License đã hết hạn. Vui lòng gia hạn lại trong CMS rồi bấm “Kiểm tra lại license”. +
+ )} + +
+ +
+ + {feedback && ( +
+ {feedback} +
+ )} +
+ +
+ ) +} + +export default function App() { + const hasLicenseBridge = Boolean( + window.electron + && 'getMachineId' in window.electron + && 'getLicenseStatus' in window.electron + ) + + const [licenseReady, setLicenseReady] = useState(!hasLicenseBridge) + const [machineId, setMachineId] = useState('') + const [license, setLicense] = useState(null) + const [checking, setChecking] = useState(false) + const [feedback, setFeedback] = useState('') + + const runLicenseCheck = async (force: boolean) => { + if (!window.electron?.getLicenseStatus) return + setChecking(true) + try { + const data = await window.electron.getLicenseStatus(force) + setLicense(data) + setFeedback(data.message || '') + } catch (error) { + setFeedback(error instanceof Error ? error.message : 'Không kiểm tra được license.') + } finally { + setChecking(false) + } + } + + const copyMachineId = async () => { + if (!machineId) return + try { + await navigator.clipboard.writeText(machineId) + setFeedback('Đã copy Machine ID.') + } catch { + setFeedback(`Machine ID: ${machineId}`) + } + } + + useEffect(() => { + if (!hasLicenseBridge) { + setLicenseReady(true) + return + } + + let isMounted = true + let timer: ReturnType | null = null + + const init = async () => { + try { + const resolvedMachineId = await window.electron!.getMachineId() + if (!isMounted) return + + setMachineId(resolvedMachineId) + + await runLicenseCheck(true) + } catch (error) { + if (!isMounted) return + setFeedback(error instanceof Error ? 
error.message : 'Không khởi tạo được license gate.') + } finally { + if (isMounted) setLicenseReady(true) + } + + timer = setInterval(() => { + void runLicenseCheck(false) + }, 30000) + } + + void init() + + return () => { + isMounted = false + if (timer) clearInterval(timer) + } + }, [hasLicenseBridge]) + + useEffect(() => { + if (!hasLicenseBridge || !window.electron?.onLicenseStatusChanged) return + return window.electron.onLicenseStatusChanged((next) => { + setLicense(next) + setFeedback(next.message || '') + if (next.machineId) setMachineId((prev) => prev || next.machineId) + }) + }, [hasLicenseBridge]) + + const licenseUnlocked = !hasLicenseBridge || (license?.allowed && license.status === 'ACTIVE') + + if (!licenseReady) { + return ( +
+ +
+ Đang kiểm tra bản quyền thiết bị... +
+
+ ) + } + + if (!licenseUnlocked) { + return ( +
+ +
+ void runLicenseCheck(true)} + checking={checking} + feedback={feedback} + /> +
+
+ ) + } + + return ( +
+ +
+ + + +
+
+ ) +} diff --git a/desktop/src/api/ai-service.ts b/desktop/src/api/ai-service.ts new file mode 100644 index 0000000..a3cd279 --- /dev/null +++ b/desktop/src/api/ai-service.ts @@ -0,0 +1,868 @@ +/** + * Unified AI Provider Service + * Supports Gemini, Claude (Anthropic), OpenAI, and DeepSeek with per-provider key management + * and automatic key rotation on quota limits (429 / 503) + */ + +export type ProviderType = 'gemini' | 'claude' | 'openai' | 'deepseek' + +export interface APIKey { + id: string + label: string + key: string + status: 'active' | 'limited' | 'invalid' + limitedAt?: number +} + +export interface GeneralSettings { + defaultProvider: ProviderType + defaultLanguage: string + defaultMaterial: string + exportRootDir: string + deepseekModel: string +} + +// ─── localStorage helpers ────────────────────────────────────── +function storageKey(provider: ProviderType) { return `flowkit_keys_${provider}` } +const GENERAL_KEY = 'flowkit_general_settings' +const RATE_LIMIT_COOLDOWN_MS = 10 * 60 * 1000 +const DEFAULT_AI_TIMEOUT_MS = 180000 +const LONG_AI_TIMEOUT_MS = 420000 +const LONG_STORY_THRESHOLD_CHARS = 9000 +const LARGE_SCENE_THRESHOLD = 18 + +function timeoutSignal(timeoutMs?: number): AbortSignal { + const normalized = Number.isFinite(timeoutMs) ? Math.max(30000, Number(timeoutMs)) : DEFAULT_AI_TIMEOUT_MS + return AbortSignal.timeout(normalized) +} + +function stripCodeFence(text: string): string { + const trimmed = text.trim() + const m = trimmed.match(/^```(?:json)?\s*([\s\S]*?)\s*```$/i) + return (m?.[1] ?? trimmed).trim() +} + +function extractBalancedJson(text: string, start: number): string | null { + const open = text[start] + const close = open === '{' ? '}' : open === '[' ? 
']' : '' + if (!close) return null + + let depth = 0 + let inString = false + let escape = false + for (let i = start; i < text.length; i += 1) { + const ch = text[i] + if (escape) { + escape = false + continue + } + if (ch === '\\') { + escape = true + continue + } + if (ch === '"') { + inString = !inString + continue + } + if (inString) continue + if (ch === open) depth += 1 + if (ch === close) { + depth -= 1 + if (depth === 0) return text.slice(start, i + 1) + } + } + return null +} + +function parseJsonLoose(raw: string): T { + const text = stripCodeFence(raw) + try { + return JSON.parse(text) as T + } catch { + // fallback below + } + + for (let i = 0; i < text.length; i += 1) { + const ch = text[i] + if (ch !== '{' && ch !== '[') continue + const candidate = extractBalancedJson(text, i) + if (!candidate) continue + try { + return JSON.parse(candidate) as T + } catch { + // keep scanning + } + } + throw new Error('AI response is not valid JSON') +} + +function normalizeProvider(value: unknown, fallback: ProviderType = 'gemini'): ProviderType { + const raw = String(value ?? '').trim().toLowerCase() + if (raw === 'gemini' || raw === 'claude' || raw === 'openai' || raw === 'deepseek') return raw + return fallback +} + +function parseStoredArray(raw: string | null): unknown[] { + if (!raw) return [] + try { + const parsed = JSON.parse(raw) + if (Array.isArray(parsed)) return parsed + if (parsed && typeof parsed === 'object') return [parsed] + return [] + } catch { + return [] + } +} + +function normalizeStatus(value: unknown): APIKey['status'] { + if (value === 'active' || value === 'limited' || value === 'invalid') return value + return 'active' +} + +function extractKeyValue(raw: any): string { + // Legacy payloads may contain `key: 1` (index) and actual secret in `value`. + // Never treat numbers as API keys. 
+ const candidates = [raw?.value, raw?.api_key, raw?.apiKey, raw?.token, raw?.key] + for (const c of candidates) { + if (typeof c === 'string' && c.trim()) return c.trim() + } + return '' +} + +function isLikelyApiKey(value: string): boolean { + const key = value.trim() + if (!key) return false + if (/^\d+$/.test(key)) return false + if (key.length < 12) return false + return true +} + +function legacyStorageKeys(provider: ProviderType): string[] { + if (provider === 'gemini') return ['flowkit_gemini_keys'] + return [`flowkit_${provider}_keys`] +} + +function normalizeKeyList(rawItems: unknown[]): APIKey[] { + const out: APIKey[] = [] + rawItems.forEach((raw, index) => { + if (typeof raw === 'string') { + const key = raw.trim() + if (!key) return + out.push({ + id: Math.random().toString(36).slice(2, 10), + label: `Key ${out.length + 1}`, + key, + status: isLikelyApiKey(key) ? 'active' : 'invalid', + }) + return + } + if (!raw || typeof raw !== 'object') return + const key = extractKeyValue(raw) + if (!key) return + const limitedAt = Number((raw as any).limitedAt) + const cooldownExpired = Number.isFinite(limitedAt) && (Date.now() - limitedAt > RATE_LIMIT_COOLDOWN_MS) + const statusBase = normalizeStatus((raw as any).status) + const status: APIKey['status'] = isLikelyApiKey(key) + ? (statusBase === 'limited' && cooldownExpired ? 'active' : statusBase) + : 'invalid' + out.push({ + id: typeof (raw as any).id === 'string' && (raw as any).id.trim() + ? (raw as any).id.trim() + : Math.random().toString(36).slice(2, 10), + label: typeof (raw as any).label === 'string' && (raw as any).label.trim() + ? (raw as any).label.trim() + : `Key ${index + 1}`, + key, + status, + limitedAt: Number.isFinite(limitedAt) ? 
limitedAt : undefined, + }) + }) + + // de-dup by key string while preserving the latest occurrence + // (important when user re-adds the same key to reactivate it) + const seen = new Set() + const dedupedReversed: APIKey[] = [] + for (let i = out.length - 1; i >= 0; i -= 1) { + const item = out[i] + const sig = item.key + if (seen.has(sig)) continue + seen.add(sig) + dedupedReversed.push(item) + } + return dedupedReversed.reverse() +} + +function providerName(provider: ProviderType): string { + if (provider === 'gemini') return 'Gemini' + if (provider === 'claude') return 'Claude' + if (provider === 'deepseek') return 'DeepSeek' + return 'OpenAI' +} + +function isInvalidKeyResponse(status: number, bodyText: string): boolean { + if (status === 401 || status === 403) return true + if (status !== 400) return false + const text = bodyText.toLowerCase() + return ( + text.includes('invalid api key') || + text.includes('api key not valid') || + text.includes('incorrect api key') || + text.includes('authentication') || + text.includes('x-api-key') || + text.includes('unauthorized') + ) +} + +export function loadKeys(provider: ProviderType): APIKey[] { + try { + const normalizedProvider = normalizeProvider(provider) + const primaryKey = storageKey(normalizedProvider) + const primaryRaw = parseStoredArray(localStorage.getItem(primaryKey)) + const legacyRaw = legacyStorageKeys(normalizedProvider) + .flatMap(k => parseStoredArray(localStorage.getItem(k))) + const merged = normalizeKeyList([...legacyRaw, ...primaryRaw]) + + // Persist migrated/sanitized shape back to canonical storage + const canonicalRaw = JSON.stringify(primaryRaw) + const canonicalNormalized = JSON.stringify(merged) + if (canonicalRaw !== canonicalNormalized) { + localStorage.setItem(primaryKey, canonicalNormalized) + } + return merged + } catch { + return [] + } +} + +export function saveKeys(provider: ProviderType, keys: APIKey[]) { + const normalizedProvider = normalizeProvider(provider) + const 
normalized = normalizeKeyList(keys ?? []) + localStorage.setItem(storageKey(normalizedProvider), JSON.stringify(normalized)) +} + +export function loadGeneralSettings(): GeneralSettings { + try { + const raw = JSON.parse(localStorage.getItem(GENERAL_KEY) ?? '{}') + return { + defaultProvider: normalizeProvider(raw.defaultProvider), + defaultLanguage: typeof raw.defaultLanguage === 'string' && raw.defaultLanguage.trim() ? raw.defaultLanguage : 'vi', + defaultMaterial: typeof raw.defaultMaterial === 'string' && raw.defaultMaterial.trim() ? raw.defaultMaterial : 'realistic', + exportRootDir: typeof raw.exportRootDir === 'string' ? raw.exportRootDir.trim() : '', + deepseekModel: typeof raw.deepseekModel === 'string' && raw.deepseekModel.trim() ? raw.deepseekModel.trim() : 'deepseek-chat', + } + } catch { + return { + defaultProvider: 'gemini', + defaultLanguage: 'vi', + defaultMaterial: 'realistic', + exportRootDir: '', + deepseekModel: 'deepseek-chat', + } + } +} + +export function saveGeneralSettings(s: Partial) { + const current = loadGeneralSettings() + const merged = { ...current, ...s } + localStorage.setItem(GENERAL_KEY, JSON.stringify({ + defaultProvider: normalizeProvider(merged.defaultProvider, current.defaultProvider), + defaultLanguage: merged.defaultLanguage || current.defaultLanguage, + defaultMaterial: merged.defaultMaterial || current.defaultMaterial, + exportRootDir: typeof merged.exportRootDir === 'string' ? merged.exportRootDir.trim() : current.exportRootDir, + deepseekModel: typeof merged.deepseekModel === 'string' && merged.deepseekModel.trim() + ? 
merged.deepseekModel.trim() + : current.deepseekModel, + })) +} + +// ─── Provider implementations ───────────────────────────────── + +async function callGemini(key: string, prompt: string, systemPrompt?: string, timeoutMs?: number): Promise { + const url = `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key=${key}` + const body: any = { + contents: [{ role: 'user', parts: [{ text: prompt }] }], + generationConfig: { responseMimeType: 'application/json', temperature: 0.3 }, + } + if (systemPrompt) body.system_instruction = { parts: [{ text: systemPrompt }] } + return fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + signal: timeoutSignal(timeoutMs), + }) +} + +async function callClaude(key: string, prompt: string, systemPrompt?: string, timeoutMs?: number): Promise { + return fetch('https://api.anthropic.com/v1/messages', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': key, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-3-5-haiku-20241022', + max_tokens: 8192, + system: (systemPrompt ?? '') + '\n\nAlways respond with valid JSON only, no markdown or explanation.', + messages: [{ role: 'user', content: prompt }], + }), + signal: timeoutSignal(timeoutMs), + }) +} + +async function callOpenAI(key: string, prompt: string, systemPrompt?: string, timeoutMs?: number): Promise { + return fetch('https://api.openai.com/v1/chat/completions', { + method: 'POST', + headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${key}` }, + body: JSON.stringify({ + model: 'gpt-4o-mini', + response_format: { type: 'json_object' }, + temperature: 0.3, + max_tokens: 4096, + messages: [ + ...(systemPrompt ? 
[{ role: 'system', content: systemPrompt }] : []), + { role: 'user', content: prompt }, + ], + }), + signal: timeoutSignal(timeoutMs), + }) +} + +async function callDeepSeek(key: string, prompt: string, systemPrompt?: string, timeoutMs?: number): Promise { + const deepseekModel = loadGeneralSettings().deepseekModel || 'deepseek-chat' + return fetch('https://api.deepseek.com/v1/chat/completions', { + method: 'POST', + headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${key}` }, + body: JSON.stringify({ + model: deepseekModel, + response_format: { type: 'json_object' }, + temperature: 0.3, + max_tokens: 4096, + messages: [ + ...(systemPrompt ? [{ role: 'system', content: systemPrompt }] : []), + { role: 'user', content: prompt }, + ], + }), + signal: timeoutSignal(timeoutMs), + }) +} + +async function parseResponse(res: Response, provider: ProviderType): Promise { + const data = await res.json() + if (provider === 'gemini') return data.candidates?.[0]?.content?.parts?.[0]?.text ?? '{}' + if (provider === 'claude') return data.content?.[0]?.text ?? '{}' + if (provider === 'openai' || provider === 'deepseek') return data.choices?.[0]?.message?.content ?? '{}' + return '{}' +} + +// ─── Main generate function ──────────────────────────────────── + +export async function aiGenerate( + prompt: string, + systemPrompt?: string, + providerOverride?: ProviderType, + options?: { timeoutMs?: number } +): Promise { + const provider = normalizeProvider(providerOverride ?? loadGeneralSettings().defaultProvider) + const allKeys = loadKeys(provider) + const activeKeys = allKeys.filter(k => k.status !== 'invalid' && isLikelyApiKey(k.key)) + + if (activeKeys.length === 0) { + const name = providerName(provider) + if (allKeys.length > 0) { + throw new Error(`${name} key không hợp lệ trong Settings. Vui lòng nhập lại key đúng định dạng ở Cài đặt → ${name}.`) + } + throw new Error(`No ${name} API keys configured. 
Go to Settings → ${name}.`) + } + + const callers: Record = { + gemini: callGemini, + claude: callClaude, + openai: callOpenAI, + deepseek: callDeepSeek, + } + + let lastError: Error | null = null + for (const keyObj of activeKeys) { + try { + const res = await callers[provider](keyObj.key.trim(), prompt, systemPrompt, options?.timeoutMs) + + if (res.status === 429 || res.status === 503) { + const updated = loadKeys(provider).map(k => + k.id === keyObj.id ? { ...k, status: 'limited' as const, limitedAt: Date.now() } : k + ) + saveKeys(provider, updated) + lastError = new Error(`Key "${keyObj.label}" hit quota — rotating to next key`) + continue + } + + if (!res.ok) { + const errText = await res.text().catch(() => '') + if (isInvalidKeyResponse(res.status, errText)) { + const updated = loadKeys(provider).map(k => + k.id === keyObj.id ? { ...k, status: 'invalid' as const } : k + ) + saveKeys(provider, updated) + lastError = new Error(`Key "${keyObj.label}" is invalid (HTTP ${res.status})`) + continue + } + throw new Error(`AI API error ${res.status}: ${errText || res.statusText}`) + } + + const text = await parseResponse(res, provider) + return parseJsonLoose(text) + } catch (err) { + if (err instanceof Error && (err.message.includes('quota') || err.message.includes('rotating'))) continue + throw err + } + } + throw lastError ?? new Error('All API keys exhausted') +} + +// ─── Prompt templates ────────────────────────────────────────── + +const SYSTEM_PROMPT = `You are a creative AI assistant for AI video documentary generation. 
+Always respond with valid JSON only, no markdown, no explanation.` + +export interface ResearchResult { + summary: string + key_facts: string[] + suggested_story_angle: string + suggested_characters: { name: string; role: string }[] +} + +function languageLabel(language: string): string { + if (language === 'vi') return 'Vietnamese' + if (language === 'en') return 'English' + if (language === 'es') return 'Spanish' + return language +} + +export async function researchTopic(topic: string, language: string, provider?: ProviderType): Promise { + const prompt = `Research this topic for an AI video documentary: "${topic}" + +Language for output: ${languageLabel(language)} + +Return JSON: +{ + "summary": "2-3 paragraph summary of the topic with key facts", + "key_facts": ["fact 1", "fact 2", "fact 3", "fact 4", "fact 5"], + "suggested_story_angle": "Suggested narrative angle for a compelling documentary", + "suggested_characters": [ + {"name": "Person/Entity name", "role": "Their role in the story"} + ] +} + +Base on your knowledge. Include specific dates, names, numbers when available.` + return aiGenerate(prompt, SYSTEM_PROMPT, provider) +} + +export interface ExtractedProject { + description: string + characters: { + name: string + entity_type: 'character' | 'location' | 'creature' | 'visual_asset' | 'generic_troop' | 'faction' + description: string + voice_description?: string + }[] + scenes: { + prompt: string + video_prompt: string + narrator_text: string + character_names: string[] + }[] +} + +type ExtractedScene = ExtractedProject['scenes'][number] +type ExtractedCharacter = ExtractedProject['characters'][number] + +const ENTITY_TYPES: ExtractedCharacter['entity_type'][] = [ + 'character', + 'location', + 'creature', + 'visual_asset', + 'generic_troop', + 'faction', +] + +const VIDEO_PROMPT_VARIETY_RULES = ` +VIDEO PROMPT DIVERSITY RULES: +- Do NOT reuse one rigid timing motif across all scenes. +- Mix these structures across the episode: + 1) [00:00-00:02] ... 
[00:02-00:05] ... [00:05-00:08] ... + 2) 0-4s ... 4-8s ... + 3) One continuous 8-second single-take camera instruction. +- Vary shot intent between adjacent scenes: establishing, reaction, detail insert, reveal, transition, climax, release. +- Vary camera language: static, tracking, dolly in/out, pan, tilt, overhead, handheld, arc, rack focus. +- Keep prompts action-focused; do not describe character appearance. +- Keep each video_prompt concise and production-ready.` + +const FALLBACK_VIDEO_PATTERNS: string[] = [ + '[00:00-00:02] Establish environment and immediate action. [00:02-00:05] Slow dolly-in as the action intensifies. [00:05-00:08] Hold a clean transition beat into the next scene.', + '0-4s: Medium framing follows the main action with steady tracking. 4-8s: Shift to a wider reveal and end with directional motion for the next cut.', + '[00:00-00:03] Close detail on the key action. [00:03-00:06] Pan to reveal context and secondary movement. [00:06-00:08] Brief still moment that resolves tension.', + 'Single-take over 8 seconds: handheld documentary movement follows the subject naturally, then eases into a stable frame for the transition.', + '[00:00-00:02] Wide establishing shot. [00:02-00:06] Arc movement around the action for depth. [00:06-00:08] Cut on motion toward the next narrative beat.', + '0-2s: Top-down or high-angle setup to orient geography. 2-6s: Drop to eye-level and track the action. 
6-8s: End on a reaction beat that bridges to next scene.', +] + +function normalizeStringArray(raw: unknown, limit = 20): string[] { + if (!Array.isArray(raw)) return [] + return raw + .filter((item): item is string => typeof item === 'string') + .map(item => item.trim()) + .filter(Boolean) + .slice(0, limit) +} + +function buildFallbackVideoPrompt(index: number): string { + return FALLBACK_VIDEO_PATTERNS[index % FALLBACK_VIDEO_PATTERNS.length] +} + +function normalizeScene(raw: any, index = 0): ExtractedScene | null { + if (!raw || typeof raw !== 'object') return null + const prompt = typeof raw.prompt === 'string' ? raw.prompt.trim() : '' + const videoPrompt = typeof raw.video_prompt === 'string' ? raw.video_prompt.trim() : '' + const narrator = typeof raw.narrator_text === 'string' ? raw.narrator_text.trim() : '' + const characterNames = Array.isArray(raw.character_names) + ? raw.character_names + .filter((v: unknown): v is string => typeof v === 'string') + .map((v: string) => v.trim()) + .filter((v: string) => Boolean(v)) + : [] + + if (!prompt && !videoPrompt && !narrator) return null + + return { + prompt: prompt || 'Cinematic continuation shot in the same setting, keeping action continuity.', + video_prompt: videoPrompt || buildFallbackVideoPrompt(index), + narrator_text: narrator, + character_names: characterNames, + } +} + +function normalizeSceneArray(rawScenes: unknown): ExtractedScene[] { + if (!Array.isArray(rawScenes)) return [] + return rawScenes + .map((scene, index) => normalizeScene(scene, index)) + .filter((scene): scene is ExtractedScene => Boolean(scene)) +} + +function normalizeCharacters(rawChars: unknown): ExtractedCharacter[] { + if (!Array.isArray(rawChars)) return [] + return rawChars + .map((raw): ExtractedCharacter | null => { + if (!raw || typeof raw !== 'object') return null + const name = typeof (raw as any).name === 'string' ? 
(raw as any).name.trim() : '' + if (!name) return null + const entityTypeRaw = typeof (raw as any).entity_type === 'string' ? (raw as any).entity_type.trim() : '' + const entityType = ENTITY_TYPES.includes(entityTypeRaw as ExtractedCharacter['entity_type']) + ? (entityTypeRaw as ExtractedCharacter['entity_type']) + : 'character' + return { + name, + entity_type: entityType, + description: typeof (raw as any).description === 'string' ? (raw as any).description.trim() : '', + voice_description: typeof (raw as any).voice_description === 'string' + ? (raw as any).voice_description.trim() + : undefined, + } + }) + .filter((char): char is ExtractedCharacter => Boolean(char)) +} + +function buildFallbackScene(index: number, total: number): ExtractedScene { + const n = index + 1 + return { + prompt: `Cinematic continuation scene ${n}/${total}. Keep setting and story continuity.`, + video_prompt: buildFallbackVideoPrompt(index), + narrator_text: '', + character_names: [], + } +} + +function shouldUseChunkedStoryAnalysis(story: string, sceneCount: number): boolean { + return story.trim().length >= LONG_STORY_THRESHOLD_CHARS || sceneCount >= LARGE_SCENE_THRESHOLD +} + +function compactStoryForPrompt(story: string, maxChars = 22000): string { + const text = story.trim() + if (text.length <= maxChars) return text + const cut = Math.max(4000, Math.floor((maxChars - 128) / 2)) + return `${text.slice(0, cut)}\n\n[...TRUNCATED ${text.length - (cut * 2)} CHARS...]\n\n${text.slice(-cut)}` +} + +function sceneBatchSize(sceneCount: number): number { + if (sceneCount >= 60) return 10 + if (sceneCount >= 32) return 8 + if (sceneCount >= 20) return 7 + return 6 +} + +function pickArcBeatsForRange(arcBeats: string[], startIndex: number, endIndex: number, totalScenes: number): string[] { + if (arcBeats.length <= 6) return arcBeats + const fromRatio = startIndex / Math.max(1, totalScenes) + const toRatio = endIndex / Math.max(1, totalScenes) + const selected = arcBeats.filter((_, idx) => { + 
const beatRatio = idx / Math.max(1, arcBeats.length - 1) + return beatRatio >= (fromRatio - 0.2) && beatRatio <= (toRatio + 0.2) + }) + return selected.length > 0 ? selected.slice(0, 8) : arcBeats.slice(0, 8) +} + +interface StoryBlueprint { + description: string + characters: ExtractedCharacter[] + arcBeats: string[] + visualLanguage: string[] + pacingNotes: string[] +} + +async function buildStoryBlueprint(story: string, language: string, provider?: ProviderType): Promise { + const lang = languageLabel(language) + const prompt = `Create a compact production blueprint for a long-form AI video project. + +STORY: +${compactStoryForPrompt(story)} + +Return JSON only: +{ + "description": "One-line project description in ${lang}", + "characters": [ + { + "name": "Name", + "entity_type": "character|location|creature|visual_asset|generic_troop|faction", + "description": "Physical appearance for entities only, concise", + "voice_description": "TTS voice style for speaking characters" + } + ], + "arc_beats": ["Beat 1", "Beat 2", "Beat 3"], + "visual_language": ["cinematic style rule 1", "style rule 2"], + "pacing_notes": ["pacing note 1", "pacing note 2"] +} + +RULES: +- Keep arc_beats concise and chronological +- Include all important entities in characters +- Output valid JSON only` + + const raw = await aiGenerate<{ + description?: unknown + characters?: unknown + arc_beats?: unknown + visual_language?: unknown + pacing_notes?: unknown + }>(prompt, SYSTEM_PROMPT, provider, { timeoutMs: LONG_AI_TIMEOUT_MS }) + + return { + description: typeof raw?.description === 'string' ? 
raw.description.trim() : '', + characters: normalizeCharacters(raw?.characters), + arcBeats: normalizeStringArray(raw?.arc_beats, 20), + visualLanguage: normalizeStringArray(raw?.visual_language, 12), + pacingNotes: normalizeStringArray(raw?.pacing_notes, 12), + } +} + +async function generateScenesInBatches( + story: string, + language: string, + sceneCount: number, + blueprint: StoryBlueprint, + provider?: ProviderType +): Promise { + const lang = languageLabel(language) + const batch = sceneBatchSize(sceneCount) + const generated: ExtractedScene[] = [] + const totalBatches = Math.ceil(sceneCount / batch) + const storySnippet = compactStoryForPrompt(story, 12000) + + for (let batchIndex = 0; batchIndex < totalBatches; batchIndex += 1) { + const start = batchIndex * batch + const end = Math.min(sceneCount, start + batch) + const count = end - start + const sceneFrom = start + 1 + const sceneTo = end + const previousContext = generated.slice(-2).map((scene, idx) => { + const no = start - Math.max(0, generated.slice(-2).length - idx) + 1 + return `Scene ${no}: narrator="${scene.narrator_text}" | prompt="${scene.prompt}"` + }).join('\n') + const arcBeats = pickArcBeatsForRange(blueprint.arcBeats, start, end, sceneCount) + const prompt = `Create scenes ${sceneFrom}-${sceneTo} of ${sceneCount} for one coherent episode. + +PROJECT DESCRIPTION: +${blueprint.description || 'Documentary-style visual storytelling'} + +ARC BEATS TO COVER IN THIS BATCH: +${arcBeats.map((beat, idx) => `${idx + 1}. ${beat}`).join('\n') || '- Keep continuity from previous beats'} + +VISUAL LANGUAGE: +${blueprint.visualLanguage.map((item, idx) => `${idx + 1}. ${item}`).join('\n') || '- Cinematic, documentary realism'} + +PACING NOTES: +${blueprint.pacingNotes.map((item, idx) => `${idx + 1}. 
${item}`).join('\n') || '- Rhythmic pacing with clear transitions'} + +SHORT STORY CONTEXT: +${storySnippet} + +LAST GENERATED SCENES (continuity anchor): +${previousContext || 'None (this is the first batch)'} + +Return JSON: +{ + "scenes": [ + { + "prompt": "IMAGE PROMPT in English, action + setting only, max 2 sentences", + "video_prompt": "Detailed motion plan optimized for Veo", + "narrator_text": "Narration in ${lang}, 1-2 sentences", + "character_names": ["character names"] + } + ] +} + +RULES: +- Return exactly ${count} scenes for this batch +- Keep strict continuity with previous scenes +- Image prompt must avoid character appearance details +${VIDEO_PROMPT_VARIETY_RULES}` + + const raw = await aiGenerate<{ scenes?: unknown }>(prompt, SYSTEM_PROMPT, provider, { timeoutMs: LONG_AI_TIMEOUT_MS }) + let batchScenes = normalizeSceneArray(raw?.scenes) + batchScenes = ensureExactSceneCount(batchScenes, count) + generated.push(...batchScenes) + } + + return generated.slice(0, sceneCount) +} + +function ensureExactSceneCount(scenes: ExtractedScene[], targetCount: number): ExtractedScene[] { + if (targetCount <= 0) return [] + const out = [...scenes] + if (out.length > targetCount) return out.slice(0, targetCount) + while (out.length < targetCount) { + out.push(buildFallbackScene(out.length, targetCount)) + } + return out +} + +async function rebalanceScenesWithAI( + story: string, + language: string, + sceneCount: number, + sourceScenes: ExtractedScene[], + provider?: ProviderType +): Promise { + const lang = languageLabel(language) + const prompt = `Rewrite the scenes below to exactly ${sceneCount} scenes for one coherent episode. 
+ +STORY: +${story} + +CURRENT SCENES JSON: +${JSON.stringify(sourceScenes)} + +Return JSON only: +{ + "scenes": [ + { + "prompt": "IMAGE PROMPT in English, action + setting only, max 2 sentences", + "video_prompt": "Detailed motion plan optimized for Veo", + "narrator_text": "Narration in ${lang}, 1-2 sentences", + "character_names": ["character names"] + } + ] +} + +RULES: +- Exactly ${sceneCount} scenes +- Keep story continuity and full arc +- Do not describe character appearance in image prompt +${VIDEO_PROMPT_VARIETY_RULES}` + + const repaired = await aiGenerate<{ scenes?: unknown }>(prompt, SYSTEM_PROMPT, provider, { timeoutMs: LONG_AI_TIMEOUT_MS }) + const scenes = normalizeSceneArray(repaired?.scenes) + return scenes.length > 0 ? scenes : null +} + +export async function analyzeStory(story: string, language: string, sceneCount = 8, provider?: ProviderType): Promise { + const lang = languageLabel(language) + const prompt = `Analyze this story for AI video generation. Extract exactly ${sceneCount} scenes. + +STORY: +${compactStoryForPrompt(story)} + +Return JSON with this schema: +{ + "description": "One-line project description in ${lang}", + "characters": [ + { + "name": "Name", + "entity_type": "character|location|creature|visual_asset|generic_troop|faction", + "description": "Physical appearance only (2-3 sentences). For characters: age, clothing, hair, build. For locations: atmosphere and setting details.", + "voice_description": "TTS voice style (only for speaking characters)" + } + ], + "scenes": [ + { + "prompt": "IMAGE PROMPT: Scene visual in English. Describe ACTION and SETTING only. Never describe character appearance (ref images handle that). Max 2 sentences.", + "video_prompt": "VIDEO MOTION: Detailed cinematic motion plan optimized for Veo", + "narrator_text": "Narrator voiceover in ${lang} for this scene. 
1-2 sentences.", + "character_names": ["Names of characters in this scene"] + } + ] +} + +RULES: +- Extract ALL named characters, key locations, creatures, important objects +- Exactly ${sceneCount} scenes covering the full story arc +- Image prompts in English for best AI generation +- Narrator text in ${lang} +- Scene prompts reference actions NOT appearance +${VIDEO_PROMPT_VARIETY_RULES}` + + let raw: ExtractedProject | null = null + const useChunked = shouldUseChunkedStoryAnalysis(story, sceneCount) + + if (useChunked) { + try { + const blueprint = await buildStoryBlueprint(story, language, provider) + const scenes = await generateScenesInBatches(story, language, sceneCount, blueprint, provider) + raw = { + description: blueprint.description, + characters: blueprint.characters, + scenes, + } + } catch { + // fallback to one-shot mode below + } + } + + if (!raw) { + raw = await aiGenerate(prompt, SYSTEM_PROMPT, provider, { + timeoutMs: useChunked ? LONG_AI_TIMEOUT_MS : DEFAULT_AI_TIMEOUT_MS, + }) + } + + const description = typeof raw?.description === 'string' ? raw.description.trim() : '' + const characters = normalizeCharacters(raw?.characters) + let scenes = normalizeSceneArray(raw?.scenes) + + if (scenes.length !== sceneCount) { + try { + const repaired = await rebalanceScenesWithAI(story, language, sceneCount, scenes, provider) + if (repaired && repaired.length > 0) scenes = repaired + } catch { + // Keep best-effort local normalization below. 
+ } + } + + scenes = ensureExactSceneCount(scenes, sceneCount) + if (scenes.length === 0) { + throw new Error('AI did not return any valid scenes') + } + + return { + description, + characters, + scenes, + } +} diff --git a/desktop/src/api/client.ts b/desktop/src/api/client.ts new file mode 100644 index 0000000..c53249f --- /dev/null +++ b/desktop/src/api/client.ts @@ -0,0 +1,18 @@ +// Always use absolute URL to the agent — works in both Electron and browser +const AGENT_BASE = 'http://127.0.0.1:8100' + +export async function fetchAPI(path: string, options?: RequestInit): Promise { + const res = await fetch(`${AGENT_BASE}${path}`, { + headers: { 'Content-Type': 'application/json', ...options?.headers }, + ...options, + }) + if (!res.ok) { + const err = await res.text().catch(() => res.statusText) + throw new Error(`API ${res.status}: ${err}`) + } + return res.json() +} + +export async function patchAPI(path: string, body: Record): Promise { + return fetchAPI(path, { method: 'PATCH', body: JSON.stringify(body) }) +} diff --git a/desktop/src/api/gemini.ts b/desktop/src/api/gemini.ts new file mode 100644 index 0000000..8db2464 --- /dev/null +++ b/desktop/src/api/gemini.ts @@ -0,0 +1,169 @@ +// Gemini API key management — stored in localStorage +// Supports multiple keys with automatic rotation on quota limit (429) + +export interface GeminiKey { + id: string + label: string + key: string + status: 'active' | 'limited' | 'invalid' + limitedAt?: number // timestamp +} + +const STORAGE_KEY = 'flowkit_gemini_keys' +const ACTIVE_IDX_KEY = 'flowkit_gemini_active' + +export function loadKeys(): GeminiKey[] { + try { + return JSON.parse(localStorage.getItem(STORAGE_KEY) ?? '[]') + } catch { return [] } +} + +export function saveKeys(keys: GeminiKey[]) { + localStorage.setItem(STORAGE_KEY, JSON.stringify(keys)) +} + +export function getActiveIndex(): number { + return parseInt(localStorage.getItem(ACTIVE_IDX_KEY) ?? 
'0', 10) +} + +export function setActiveIndex(idx: number) { + localStorage.setItem(ACTIVE_IDX_KEY, String(idx)) +} + +function getNextActiveKey(keys: GeminiKey[]): GeminiKey | null { + const active = keys.filter(k => k.status === 'active') + return active[0] ?? null +} + +export class GeminiService { + private static call(key: string, prompt: string, systemPrompt?: string) { + const url = `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key=${key}` + const contents: any[] = [{ role: 'user', parts: [{ text: prompt }] }] + const body: any = { contents } + if (systemPrompt) { + body.system_instruction = { parts: [{ text: systemPrompt }] } + } + body.generationConfig = { responseMimeType: 'application/json', temperature: 0.3 } + return fetch(url, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(body) }) + } + + /** + * Call Gemini with auto key rotation on 429 quota limits. + * Returns parsed JSON from the model response. + */ + static async generate(prompt: string, systemPrompt?: string): Promise { + const keys = loadKeys() + if (keys.length === 0) throw new Error('No Gemini API keys configured. Go to Settings → API Keys.') + + const active = keys.filter(k => k.status !== 'invalid') + if (active.length === 0) throw new Error('All Gemini API keys are invalid or rate-limited.') + + let lastError: Error | null = null + for (const keyObj of active) { + try { + const res = await this.call(keyObj.key, prompt, systemPrompt) + + if (res.status === 429 || res.status === 503) { + // Mark this key as limited, try next + const updated = loadKeys().map(k => + k.id === keyObj.id ? 
{ ...k, status: 'limited' as const, limitedAt: Date.now() } : k + ) + saveKeys(updated) + lastError = new Error(`Key "${keyObj.label}" hit quota limit — rotated to next key`) + continue + } + + if (res.status === 400 || res.status === 401 || res.status === 403) { + // Mark key as invalid + const updated = loadKeys().map(k => + k.id === keyObj.id ? { ...k, status: 'invalid' as const } : k + ) + saveKeys(updated) + lastError = new Error(`Key "${keyObj.label}" is invalid (${res.status})`) + continue + } + + if (!res.ok) { + throw new Error(`Gemini API error ${res.status}: ${await res.text()}`) + } + + const data = await res.json() + const text = data.candidates?.[0]?.content?.parts?.[0]?.text ?? '{}' + return JSON.parse(text) as T + } catch (err) { + if (err instanceof Error && err.message.includes('quota')) continue + throw err + } + } + + throw lastError ?? new Error('All Gemini API keys exhausted') + } +} + +// ─── Project Analysis Prompt ────────────────────────────────── + +export interface ExtractedProject { + description: string + characters: { + name: string + entity_type: 'character' | 'location' | 'creature' | 'visual_asset' | 'generic_troop' | 'faction' + description: string + voice_description?: string + }[] + scenes: { + prompt: string + video_prompt: string + narrator_text: string + character_names: string[] + }[] +} + +const SYSTEM_PROMPT = `You are a creative AI assistant that analyzes story scripts and extracts structured data for AI video generation. Always return valid JSON only, no markdown.` + +export async function analyzeStory(story: string, language: string, sceneCount?: number): Promise { + const count = sceneCount ?? 8 + const languageLabel = language === 'vi' + ? 'Vietnamese' + : language === 'en' + ? 'English' + : language === 'es' + ? 'Spanish' + : language + const prompt = `Analyze this story/script and extract structured data for AI video generation. 
+ +STORY: +${story} + +LANGUAGE: ${language} + +Return JSON with this exact schema: +{ + "description": "One-line project description in the story's language", + "characters": [ + { + "name": "Character/entity name", + "entity_type": "character|location|creature|visual_asset|generic_troop|faction", + "description": "Physical appearance and role description (2-3 sentences). For characters: describe clothing, age, physical features. For locations: describe the place.", + "voice_description": "Voice style for TTS (only for characters/creatures who speak)" + } + ], + "scenes": [ + { + "prompt": "Visual scene image prompt describing ACTION and SETTING only. Never describe character appearance (that comes from ref images). Max 2 sentences.", + "video_prompt": "8-second video motion description: '0-3s: [camera/action]. 3-6s: [action]. 6-8s: [closing shot].'", + "narrator_text": "Narrator voiceover text for this scene (in ${languageLabel})", + "character_names": ["Names of characters appearing in this scene"] + } + ] +} + +Rules: +- Extract ${count} scenes total that tell the full story arc +- Identify ALL named characters, key locations, and important objects/creatures +- narrator_text must be in the same language as the story (${languageLabel}) +- scene prompts in English for best AI image generation results +- entity_type: use "character" for people, "location" for places, "creature" for animals/monsters, "visual_asset" for objects +- Return ONLY the JSON object, no markdown` + + return GeminiService.generate(prompt, SYSTEM_PROMPT) +} diff --git a/desktop/src/api/useExtensionStatus.ts b/desktop/src/api/useExtensionStatus.ts new file mode 100644 index 0000000..f8111a0 --- /dev/null +++ b/desktop/src/api/useExtensionStatus.ts @@ -0,0 +1,86 @@ +import { useState, useEffect, useCallback } from 'react' +import { fetchAPI } from './client' + +interface HealthStatus { + status: string + extension_connected: boolean +} + +interface FlowRuntimeStatus { + connected?: boolean + 
runtime_connected?: boolean + state?: string + manual_disconnect?: boolean + flow_tab_id?: number | null + flow_tab_url?: string | null +} + +let _cached: boolean | null = null +const _listeners = new Set<(v: boolean) => void>() + +// Broadcast to all mounted hooks +function broadcast(v: boolean) { + _cached = v + _listeners.forEach(fn => fn(v)) +} + +// Single background poll shared across hook instances +let _pollInterval: ReturnType | null = null +let _mountCount = 0 + +async function doCheck() { + try { + const [health, runtime] = await Promise.all([ + fetchAPI('/health').catch(() => ({ status: 'error', extension_connected: false })), + fetchAPI('/api/flow/status').catch(() => ({})), + ]) + const wsConnected = !!health.extension_connected + const runtimeConnected = + runtime.runtime_connected !== undefined + ? !!runtime.runtime_connected + : (!!runtime.connected && runtime.state !== 'off' && runtime.manual_disconnect !== true) + const hasFlowTab = + runtime.flow_tab_id === undefined + ? true + : ((runtime.flow_tab_id !== null && runtime.flow_tab_id !== undefined) || !!runtime.flow_tab_url) + const connected = wsConnected && runtimeConnected && hasFlowTab + broadcast(connected) + return connected + } catch { + broadcast(false) + return false + } +} + +export function useExtensionStatus() { + const [connected, setConnected] = useState(_cached ?? 
false) + + const check = useCallback(() => doCheck(), []) + + useEffect(() => { + _listeners.add(setConnected) + _mountCount++ + + // If we already have a cached value, apply it immediately + if (_cached !== null) setConnected(_cached) + + // Start shared poll only if not already running + if (!_pollInterval) { + doCheck() // immediate + _pollInterval = setInterval(doCheck, 5000) + } else { + doCheck() // refresh on mount + } + + return () => { + _listeners.delete(setConnected) + _mountCount-- + if (_mountCount === 0 && _pollInterval) { + clearInterval(_pollInterval) + _pollInterval = null + } + } + }, []) + + return { connected, check } +} diff --git a/desktop/src/api/useWebSocket.ts b/desktop/src/api/useWebSocket.ts new file mode 100644 index 0000000..0f80d2e --- /dev/null +++ b/desktop/src/api/useWebSocket.ts @@ -0,0 +1,46 @@ +import { useState, useEffect, useRef, useCallback } from 'react' +import type { WSEvent } from '../types' + +// Fixed URL for Electron — no window.location.host in file:// context +const WS_URL = 'ws://127.0.0.1:8100/ws/dashboard' + +export function useWebSocket() { + const [isConnected, setIsConnected] = useState(false) + const [lastEvent, setLastEvent] = useState(null) + const wsRef = useRef(null) + const retriesRef = useRef(0) + + const connect = useCallback(() => { + const ws = new WebSocket(WS_URL) + wsRef.current = ws + + ws.onopen = () => { + setIsConnected(true) + retriesRef.current = 0 + } + + ws.onmessage = (e) => { + try { + const event: WSEvent = JSON.parse(e.data) + setLastEvent(event) + } catch { } + } + + ws.onclose = () => { + setIsConnected(false) + wsRef.current = null + const delay = Math.min(1000 * 2 ** retriesRef.current, 30000) + retriesRef.current++ + setTimeout(connect, delay) + } + + ws.onerror = () => ws.close() + }, []) + + useEffect(() => { + connect() + return () => { wsRef.current?.close() } + }, [connect]) + + return { isConnected, lastEvent } +} diff --git a/desktop/src/components/gallery/VideoGallery.tsx 
b/desktop/src/components/gallery/VideoGallery.tsx new file mode 100644 index 0000000..8c41e14 --- /dev/null +++ b/desktop/src/components/gallery/VideoGallery.tsx @@ -0,0 +1,91 @@ +import { useState } from 'react' +import type { Scene } from '../../types' +import VideoPlayer from './VideoPlayer' +import { orientationAspectCss, orientationPrefix, sceneUrl } from '../../lib/orientation' + +interface VideoGalleryProps { + scenes: Scene[] + orientation?: string +} + +export default function VideoGallery({ scenes, orientation }: VideoGalleryProps) { + const [activeIndex, setActiveIndex] = useState(null) + const prefix = orientationPrefix(orientation) + + const videoscenes = scenes.filter(s => { + const video = sceneUrl(s, orientation, 'video') + const upscale = sceneUrl(s, orientation, 'upscale') + return Boolean(video || upscale) + }) + + if (videoscenes.length === 0) { + return ( +
+ Chưa có video hoàn tất. +
+ ) + } + + return ( + <> +
+ {videoscenes.map((scene, idx) => ( +
setActiveIndex(idx)} + > + {/* Thumbnail */} +
+ {sceneUrl(scene, orientation, 'image') ? ( + {`Cảnh + ) : ( +
+ Chưa có ảnh +
+ )} + + {/* Overlay */} +
+
+ + #{scene.display_order + 1} + +
+ {sceneUrl(scene, orientation, 'video') && ( + + ✓ + + )} + {sceneUrl(scene, orientation, 'upscale') && ( + + ★ + + )} +
+
+
+ {scene.prompt?.slice(0, 60) ?? ''} +
+
+
+
+ ))} +
+ + {activeIndex !== null && ( + setActiveIndex(null)} + /> + )} + + ) +} diff --git a/desktop/src/components/gallery/VideoPlayer.tsx b/desktop/src/components/gallery/VideoPlayer.tsx new file mode 100644 index 0000000..2a1f7c1 --- /dev/null +++ b/desktop/src/components/gallery/VideoPlayer.tsx @@ -0,0 +1,161 @@ +import { useState, useEffect } from 'react' +import type { Scene } from '../../types' + +interface VideoPlayerProps { + scenes: Scene[] + orientation?: 'vertical' | 'horizontal' + initialIndex: number + onClose: () => void +} + +function parseCharacterNames(raw: string[] | string | null): string[] { + if (!raw) return [] + if (Array.isArray(raw)) return raw + try { + const parsed = JSON.parse(raw) + if (Array.isArray(parsed)) return parsed + return [] + } catch { + return [] + } +} + +export default function VideoPlayer({ scenes, orientation = 'vertical', initialIndex, onClose }: VideoPlayerProps) { + const [index, setIndex] = useState(initialIndex) + const scene = scenes[index] + const primary = orientation + const secondary = primary === 'vertical' ? 
'horizontal' : 'vertical' + const videoSrc = ( + scene[`${primary}_upscale_url` as keyof Scene] as string | null | undefined + ) || ( + scene[`${primary}_video_url` as keyof Scene] as string | null | undefined + ) || ( + scene[`${secondary}_upscale_url` as keyof Scene] as string | null | undefined + ) || ( + scene[`${secondary}_video_url` as keyof Scene] as string | null | undefined + ) || '' + const charNames = parseCharacterNames(scene.character_names) + + useEffect(() => { + function onKey(e: KeyboardEvent) { + if (e.key === 'Escape') onClose() + if (e.key === 'ArrowLeft' && index > 0) setIndex(i => i - 1) + if (e.key === 'ArrowRight' && index < scenes.length - 1) setIndex(i => i + 1) + } + window.addEventListener('keydown', onKey) + return () => window.removeEventListener('keydown', onKey) + }, [index, scenes.length, onClose]) + + function chainBadgeStyle(ct: string) { + if (ct === 'ROOT') return { background: 'var(--accent)', color: '#fff' } + if (ct === 'CONTINUATION') return { background: 'var(--green)', color: '#fff' } + return { background: 'var(--yellow)', color: '#000' } + } + + return ( +
+
e.stopPropagation()} + > + {/* Close button */} + + + {/* Video */} +
+
+ + {/* Sidebar */} +
+
+ + Cảnh #{scene.display_order + 1} + + + {scene.chain_type} + +
+ + {scene.prompt && ( +
+
PROMPT ẢNH
+
{scene.prompt}
+
+ )} + + {scene.video_prompt && ( +
+
VIDEO PROMPT
+
{scene.video_prompt}
+
+ )} + + {charNames.length > 0 && ( +
+
NHÂN VẬT
+
+ {charNames.map(name => ( + + {name} + + ))} +
+
+ )} + + {/* Download */} + + Tải video + + + {/* Prev / Next */} +
+ + +
+
+
+
+ ) +} diff --git a/desktop/src/components/logs/LogViewer.tsx b/desktop/src/components/logs/LogViewer.tsx new file mode 100644 index 0000000..57a0da4 --- /dev/null +++ b/desktop/src/components/logs/LogViewer.tsx @@ -0,0 +1,99 @@ +import { useState, useEffect, useRef } from 'react' +import { Input } from '../ui/input' +import { Button } from '../ui/button' +import { ScrollArea } from '../ui/scroll-area' +import { Badge } from '../ui/badge' +import { cn } from '../../lib/utils' + +interface LogLine { + ts: string + level: string + type: string + msg: string +} + +export default function LogViewer() { + const [lines, setLines] = useState([]) + const [filter, setFilter] = useState('') + const bottomRef = useRef(null) + + useEffect(() => { + const ws = new WebSocket('ws://127.0.0.1:8100/ws/dashboard') + ws.onmessage = (e) => { + try { + const data = JSON.parse(e.data) + const ts = new Date().toLocaleTimeString() + if (data.type === 'log' && data.data?.message) { + setLines(prev => [...prev.slice(-499), { + ts, level: data.data.level ?? 'INFO', type: data.type, msg: data.data.message, + }]) + return + } + const payload = data.data ? JSON.stringify(data.data) : '' + setLines(prev => [...prev.slice(-499), { + ts, + level: data.type === 'request_failed' ? 'ERROR' : 'INFO', + type: data.type ?? 'event', + msg: payload ? `${data.type}: ${payload}` : String(data.type ?? 'event'), + }]) + } catch { } + } + return () => ws.close() + }, []) + + useEffect(() => { + bottomRef.current?.scrollIntoView({ behavior: 'smooth' }) + }, [lines]) + + const filtered = filter + ? lines.filter(l => + l.msg.toLowerCase().includes(filter.toLowerCase()) || + l.type.toLowerCase().includes(filter.toLowerCase())) + : lines + + const levelColor = (level: string) => { + if (level === 'ERROR') return 'text-red-500' + if (level === 'WARNING') return 'text-amber-500' + return 'text-[hsl(var(--muted-foreground))]' + } + + return ( +
+ {/* Toolbar */} +
+ setFilter(e.target.value)} + placeholder="Lọc nhật ký..." + className="flex-1 text-xs font-mono" + /> + + + {filtered.length} dòng + +
+ + {/* Log content */} + + {filtered.length === 0 && ( +
+ Chưa có nhật ký — đang chờ agent hoạt động... +
+ )} +
+ {filtered.map((line, i) => ( +
+ {line.ts} + {line.level} + {line.type} + {line.msg} +
+ ))} +
+
+ +
+ ) +} diff --git a/desktop/src/components/logs/StatusDashboard.tsx b/desktop/src/components/logs/StatusDashboard.tsx new file mode 100644 index 0000000..e6130e5 --- /dev/null +++ b/desktop/src/components/logs/StatusDashboard.tsx @@ -0,0 +1,197 @@ +import { useEffect, useMemo, useState } from 'react' +import { RefreshCw } from 'lucide-react' +import ActionButton from '../ui/ActionButton' +import { fetchAPI } from '../../api/client' + +interface ProjectSummary { + id: string + name: string + status: string + tier: string | null + orientation: string + material: string + video_count: number + created_at: string +} + +interface ProjectStatus { + project: { id: string; name: string; status: string; material: string } + video: { id: string; title: string; orientation: string } + counts: { + refs_done: number + refs_total: number + images_done: number + images_total: number + videos_done: number + videos_total: number + upscales_done: number + upscales_total: number + tts_done?: number + tts_total?: number + downloads_done?: number + downloads_total?: number + } + queue: { pending: number; processing: number; failed: number } + characters: Array<{ id: string; name: string; entity_type: string; ready: boolean; media_id: string | null }> + scenes: Array<{ + id: string + display_order: number + prompt: string | null + narrator_text?: string | null + image_status: string + video_status: string + upscale_status: string + tts_status?: string + tts_audio_path?: string | null + download_ready?: boolean + download_path?: string | null + }> + suggested_next_action: string +} + +function CountPill({ label, value }: { label: string; value: string }) { + return ( + + {label} {value} + + ) +} + +export default function StatusDashboard() { + const [projects, setProjects] = useState([]) + const [projectId, setProjectId] = useState('') + const [status, setStatus] = useState(null) + const [autoRefresh, setAutoRefresh] = useState(true) + const [error, setError] = useState('') + + const 
loadProjects = async () => { + const res = await fetchAPI<{ projects: ProjectSummary[] }>('/api/workflows/status') + setProjects(res.projects) + if (!projectId && res.projects[0]) setProjectId(res.projects[0].id) + } + + const loadStatus = async () => { + if (!projectId) return + const res = await fetchAPI(`/api/workflows/status?project_id=${projectId}`) + setStatus(res) + } + + useEffect(() => { + loadProjects().catch((e: any) => setError(e.message ?? 'Không tải được danh sách dự án')) + }, []) + + useEffect(() => { + loadStatus().catch((e: any) => setError(e.message ?? 'Không tải được trạng thái')) + }, [projectId]) + + useEffect(() => { + if (!autoRefresh) return + const timer = setInterval(() => { + Promise.all([loadProjects(), loadStatus()]).catch(() => { }) + }, 10000) + return () => clearInterval(timer) + }, [autoRefresh, projectId]) + + const sceneRows = useMemo(() => (status?.scenes ?? []).slice(0, 80), [status]) + + return ( +
+
+ + + Promise.all([loadProjects(), loadStatus()]).catch(() => { })}> + Tải lại + + + + +
+ { + if (!projectId) return + await fetchAPI('/api/active-project', { method: 'PUT', body: JSON.stringify({ project_id: projectId }) }) + }}> + Đặt đang dùng + + { + await fetchAPI('/api/active-project', { method: 'DELETE' }) + }}> + Bỏ đang dùng + +
+ + {error &&
{error}
} + + {status && ( +
+
+ {status.project.name} · {status.video.title} · {status.video.orientation} +
+
+ + + + + {typeof status.counts.tts_total === 'number' && ( + + )} + {typeof status.counts.downloads_total === 'number' && ( + + )} + + +
+
+ )} + +
+
+
Thực thể
+
+ {(status?.characters ?? []).map(c => ( +
+
{c.name}
+
{c.ready ? 'Sẵn sàng' : 'Thiếu media_id'}
+
+ ))} +
+
+ +
+ + + + + + + + + + + + + + {sceneRows.map(s => ( + + + + + + + + + + ))} + +
#PromptẢnhVideoTTSUpscaleLocal
#{s.display_order + 1}{(s.prompt ?? '').slice(0, 72)}{s.image_status}{s.video_status}{s.tts_status ?? ((s.narrator_text ?? '').trim() ? 'PENDING' : 'SKIPPED')}{s.upscale_status}{s.download_ready ? 'READY' : '-'}
+
+
+
+ ) +} diff --git a/desktop/src/components/pipeline/BrandLogoModal.tsx b/desktop/src/components/pipeline/BrandLogoModal.tsx new file mode 100644 index 0000000..50d5e8a --- /dev/null +++ b/desktop/src/components/pipeline/BrandLogoModal.tsx @@ -0,0 +1,142 @@ +import { useEffect, useState } from 'react' +import { BadgeCheck, Image, Wand2 } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI } from '../../api/client' + +interface Props { + projectId: string + videoId: string + onClose: () => void +} + +interface ChannelItem { + name: string + icon_exists: boolean + intro_exists: boolean + outro_exists: boolean + badge_4k_exists: boolean +} + +interface BrandResult { + output_path: string + width: number + height: number + logo_size: number + logo_padding: number + intro_used?: string | null + outro_used?: string | null + badge_4k_applied: boolean + thumbnails: string[] +} + +export default function BrandLogoModal({ projectId, videoId, onClose }: Props) { + const [channels, setChannels] = useState([]) + const [channelName, setChannelName] = useState('') + const [size, setSize] = useState('') + const [applyThumbs, setApplyThumbs] = useState(false) + const [includeIntro, setIncludeIntro] = useState(true) + const [includeOutro, setIncludeOutro] = useState(true) + + const [loading, setLoading] = useState(false) + const [error, setError] = useState('') + const [result, setResult] = useState(null) + + useEffect(() => { + fetchAPI('/api/workflows/channels') + .then((rows) => { + setChannels(rows) + if (!channelName && rows[0]) setChannelName(rows[0].name) + }) + .catch(() => setChannels([])) + }, [channelName]) + + const run = async () => { + if (!channelName) return + setLoading(true) + setError('') + setResult(null) + try { + const r = await fetchAPI('/api/workflows/brand-logo', { + method: 'POST', + body: JSON.stringify({ + channel_name: channelName, + project_id: projectId, + video_id: videoId, + size: 
size.trim() ? Number(size) : undefined, + apply_thumbnails: applyThumbs, + include_intro: includeIntro, + include_outro: includeOutro, + }), + }) + setResult(r) + } catch (e: any) { + setError(e.message ?? 'Branding failed') + } finally { + setLoading(false) + } + } + + const selected = channels.find(c => c.name === channelName) + + return ( + +
+
+ Áp intro/outro + watermark logo + 4K badge lên video final của project. +
+ +
+
Channel
+ + {selected && ( +
+ Icon: {selected.icon_exists ? '✓' : '✗'} + Intro: {selected.intro_exists ? '✓' : '✗'} + Outro: {selected.outro_exists ? '✓' : '✗'} + 4K badge: {selected.badge_4k_exists ? '✓' : '✗'} +
+ )} +
+ +
+
+ + setSize(e.target.value)} className="input" placeholder="Auto" /> +
+
+ + + +
+
+ + {error &&
{error}
} + + {result && ( +
+
Branding completed
+
Output: {result.output_path}
+
Resolution: {result.width}x{result.height}
+
Logo: {result.logo_size}px (pad {result.logo_padding}px)
+
Intro: {result.intro_used ?? 'skipped'} · Outro: {result.outro_used ?? 'skipped'}
+
4K badge: {result.badge_4k_applied ? 'applied' : 'not applied'}
+ {result.thumbnails.length > 0 && ( +
Thumbnails branded: {result.thumbnails.length}
+ )} +
+ )} + +
+ Close + + {loading ? 'Processing...' : 'Apply Branding'} + +
+
+
+ ) +} diff --git a/desktop/src/components/pipeline/ChainVideosModal.tsx b/desktop/src/components/pipeline/ChainVideosModal.tsx new file mode 100644 index 0000000..0b67d2d --- /dev/null +++ b/desktop/src/components/pipeline/ChainVideosModal.tsx @@ -0,0 +1,160 @@ +/** + * ChainVideosModal + * Sets end_scene_media_id on CONTINUATION scenes for smooth transitions, + * then triggers batch video generation. + * Corresponds to CLI skill: fk:gen-chain-videos + */ +import { useState } from 'react' +import { Link2, CheckCircle, AlertTriangle, Film } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI, patchAPI } from '../../api/client' + +interface Scene { + id: string + display_order: number + chain_type: string + parent_scene_id: string | null + vertical_image_media_id: string | null + horizontal_image_media_id: string | null + vertical_video_status: string + horizontal_video_status: string +} + +interface ChainSetup { + scene: Scene + parent: Scene | null + endImageSet: boolean + status: 'ready' | 'missing_image' | 'not_continuation' +} + +interface Props { + videoId: string + projectId: string + orientation: string + onClose: () => void +} + +export default function ChainVideosModal({ videoId, projectId, orientation, onClose }: Props) { + const [chains, setChains] = useState([]) + const [loading, setLoading] = useState(false) + const [phase, setPhase] = useState<'idle' | 'setup' | 'generating' | 'done'>('idle') + const [error, setError] = useState('') + + const setupChains = async () => { + setLoading(true); setError('') + try { + const prefix = orientation === 'HORIZONTAL' ? 
'horizontal' : 'vertical' + const scenes = await fetchAPI(`/api/scenes?video_id=${videoId}`) + const byId = Object.fromEntries(scenes.map(s => [s.id, s])) + const sorted = [...scenes].sort((a, b) => a.display_order - b.display_order) + + const setupList: ChainSetup[] = [] + for (const scene of sorted) { + if (scene.chain_type !== 'CONTINUATION') { + setupList.push({ scene, parent: null, endImageSet: false, status: 'not_continuation' }) + continue + } + const parent = scene.parent_scene_id ? byId[scene.parent_scene_id] : null + const parentImageMediaId = parent ? (parent as any)[`${prefix}_image_media_id`] : null + if (!parentImageMediaId) { + setupList.push({ scene, parent, endImageSet: false, status: 'missing_image' }) + continue + } + // Set end_scene_media_id to parent's image + await patchAPI(`/api/scenes/${scene.id}`, { + [`${prefix}_end_scene_media_id`]: parentImageMediaId, + }) + setupList.push({ scene, parent, endImageSet: true, status: 'ready' }) + } + setChains(setupList) + setPhase('setup') + } catch (err: any) { setError(err.message) } + finally { setLoading(false) } + } + + const genVideos = async () => { + setLoading(true); setError('') + try { + const scenes = await fetchAPI(`/api/scenes?video_id=${videoId}`) + const requests = scenes.map(s => ({ + type: 'GENERATE_VIDEO', + project_id: projectId, + video_id: videoId, + scene_id: s.id, + orientation, + })) + await fetchAPI('/api/requests/batch', { method: 'POST', body: JSON.stringify({ requests }) }) + setPhase('generating') + } catch (err: any) { setError(err.message) } + finally { setLoading(false) } + } + + const continuation = chains.filter(c => c.status === 'ready').length + const missing = chains.filter(c => c.status === 'missing_image').length + + return ( + +
+
+

Sets end_scene_media_id on CONTINUATION scenes so each video smoothly transitions into the next scene's visual world.

+

+ Prerequisite: All scenes must have generated images before chaining. +

+
+ + {phase === 'idle' && ( + + Setup Chains + + )} + + {error &&
{error}
} + + {chains.length > 0 && ( + <> +
+ ✓ {continuation} chains set + {missing > 0 && ⚠ {missing} missing images} + — {chains.filter(c => c.status === 'not_continuation').length} ROOT/standalone +
+ +
+ {chains.map(c => ( +
+ #{c.scene.display_order + 1} + {c.scene.chain_type} + {c.status === 'ready' && Chained} + {c.status === 'missing_image' && No image} + {c.status === 'not_continuation' && — standalone} +
+ ))} +
+ + {missing > 0 && ( +
+ ⚠ {missing} CONTINUATION scenes are missing parent images. Generate all scene images first, then re-run Setup Chains. +
+ )} + + {phase === 'setup' && continuation > 0 && ( + + Generate All Chain Videos ({chains.length} scenes) + + )} + {phase === 'generating' && ( +
+ ✓ Generation queued! Monitor progress in the Pipeline bar (Videos status bar). +
+ )} + + )} + +
+ Close +
+
+
+ ) +} diff --git a/desktop/src/components/pipeline/CreativeMixModal.tsx b/desktop/src/components/pipeline/CreativeMixModal.tsx new file mode 100644 index 0000000..eb61231 --- /dev/null +++ b/desktop/src/components/pipeline/CreativeMixModal.tsx @@ -0,0 +1,186 @@ +import { useEffect, useMemo, useState } from 'react' +import { Sparkles } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI } from '../../api/client' +import type { Scene } from '../../types' +import { normalizeOrientation } from '../../lib/orientation' + +interface Props { + projectId: string + videoId: string + orientation: string + onClose: () => void +} + +interface Suggestion { + id: string + afterSceneId: string + afterOrder: number + label: string + prompt: string + video_prompt: string + narrator_text: string + character_names: string[] +} + +function parseChars(raw: Scene['character_names']): string[] { + if (!raw) return [] + if (Array.isArray(raw)) return raw + try { + const parsed = JSON.parse(raw) + return Array.isArray(parsed) ? parsed : [] + } catch { + return [] + } +} + +function buildSuggestions(scenes: Scene[]): Suggestion[] { + const keywords = /(battle|fight|attack|reveal|explosion|escape|close[- ]up|dramatic|confront)/i + const out: Suggestion[] = [] + + scenes.forEach((scene, idx) => { + const prompt = `${scene.prompt ?? ''} ${scene.video_prompt ?? ''}`.trim() + const shouldPick = keywords.test(prompt) || idx % 3 === 1 + if (!shouldPick) return + + const charNames = parseChars(scene.character_names) + out.push({ + id: `mix-${scene.id}`, + afterSceneId: scene.id, + afterOrder: scene.display_order, + label: `Insert close-up after #${scene.display_order + 1}`, + prompt: 'Close-up cinematic angle of the previous action beat, keeping same characters and environment consistency.', + video_prompt: '0-3s: close-up detail reveal. 3-6s: subtle camera push-in. 
6-8s: hold dramatic expression.', + narrator_text: scene.narrator_text ?? '', + character_names: charNames, + }) + }) + + return out.slice(0, 12) +} + +export default function CreativeMixModal({ projectId, videoId, orientation, onClose }: Props) { + const [scenes, setScenes] = useState([]) + const [selected, setSelected] = useState>({}) + const [queueImages, setQueueImages] = useState(true) + const [loading, setLoading] = useState(false) + const [error, setError] = useState('') + const [createdCount, setCreatedCount] = useState(0) + + const ori = normalizeOrientation(orientation) + + useEffect(() => { + fetchAPI(`/api/scenes?video_id=${videoId}`) + .then(rows => { + const sorted = rows.sort((a, b) => a.display_order - b.display_order) + setScenes(sorted) + const defaults: Record = {} + buildSuggestions(sorted).forEach(s => { defaults[s.id] = true }) + setSelected(defaults) + }) + .catch(() => setScenes([])) + }, [videoId]) + + const suggestions = useMemo(() => buildSuggestions(scenes), [scenes]) + + const apply = async () => { + setLoading(true) + setError('') + setCreatedCount(0) + + try { + const picked = suggestions.filter(s => selected[s.id]) + if (picked.length === 0) return + + const createdSceneIds: string[] = [] + for (const s of picked) { + const created = await fetchAPI<{ id: string }>('/api/scenes', { + method: 'POST', + body: JSON.stringify({ + video_id: videoId, + display_order: s.afterOrder + 1, + chain_type: 'INSERT', + parent_scene_id: s.afterSceneId, + source: 'system', + prompt: s.prompt, + video_prompt: s.video_prompt, + narrator_text: s.narrator_text || null, + character_names: s.character_names.length > 0 ? 
s.character_names : null, + }), + }) + createdSceneIds.push(created.id) + } + + if (queueImages && createdSceneIds.length > 0) { + await fetchAPI('/api/requests/batch', { + method: 'POST', + body: JSON.stringify({ + requests: createdSceneIds.map(sceneId => ({ + type: 'GENERATE_IMAGE', + project_id: projectId, + video_id: videoId, + scene_id: sceneId, + orientation: ori, + })), + }), + }) + } + + setCreatedCount(createdSceneIds.length) + } catch (e: any) { + setError(e.message ?? 'Failed to apply creative mix') + } finally { + setLoading(false) + } + } + + return ( + +
+
+ Đề xuất các INSERT scene (source=system) để tạo multi-angle/cutaway cinematic từ scene hiện có. +
+ + {suggestions.length === 0 ? ( +
No suitable scenes detected for creative mix suggestions.
+ ) : ( +
+ {suggestions.map(s => ( + + ))} +
+ )} + + + + {error &&
{error}
} + {createdCount > 0 && ( +
+ ✓ Created {createdCount} insert scene(s){queueImages ? ' and queued image generation' : ''}. +
+ )} + +
+ Close + + {loading ? 'Applying...' : 'Apply Creative Mix'} + +
+
+
+ ) +} diff --git a/desktop/src/components/pipeline/ExportModal.tsx b/desktop/src/components/pipeline/ExportModal.tsx new file mode 100644 index 0000000..c190277 --- /dev/null +++ b/desktop/src/components/pipeline/ExportModal.tsx @@ -0,0 +1,168 @@ +import { useState } from 'react' +import { Download, FolderOpen } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI } from '../../api/client' +import { loadGeneralSettings } from '../../api/ai-service' + +interface Props { + videoId: string + projectId: string + defaultOrientation?: string + onClose: () => void +} + +const ORIENTATIONS = [ + { id: 'VERTICAL', label: '📱 Dọc (9:16) — Shorts/Reels' }, + { id: 'HORIZONTAL', label: '🖥 Ngang (16:9) — YouTube' }, +] + +export default function ExportModal({ videoId, projectId, defaultOrientation = 'VERTICAL', onClose }: Props) { + const defaults = loadGeneralSettings() + const [orientation, setOrientation] = useState(defaultOrientation) + const [withNarrator, setWithNarrator] = useState(true) + const [withMusic, setWithMusic] = useState(false) + const [fitNarrator, setFitNarrator] = useState(false) + const [narratorBuffer, setNarratorBuffer] = useState(0.5) + const [status, setStatus] = useState<'idle' | 'exporting' | 'done' | 'error'>('idle') + const [outputPath, setOutputPath] = useState('') + const [exportDir, setExportDir] = useState('') + const [exportedImages, setExportedImages] = useState(0) + const [exportedVideos, setExportedVideos] = useState(0) + const [error, setError] = useState('') + + const exportVideo = async () => { + setStatus('exporting'); setError('') + try { + const r = await fetchAPI<{ + output_path?: string + export_dir?: string | null + exported_images?: number + exported_videos?: number + }>(`/api/videos/${videoId}/concat`, { + method: 'POST', + body: JSON.stringify({ + project_id: projectId, + orientation, + with_narrator: withNarrator, + with_music: withMusic, + fit_narrator: 
fitNarrator, + narrator_buffer: narratorBuffer, + export_root_dir: defaults.exportRootDir || null, + export_assets: true, + }), + }) + setOutputPath(r.output_path ?? '') + setExportDir(r.export_dir ?? '') + setExportedImages(r.exported_images ?? 0) + setExportedVideos(r.exported_videos ?? 0) + setStatus('done') + } catch (e: any) { + setStatus('error') + setError(e.message) + } + } + + const openFolder = async () => { + const target = exportDir || outputPath + if (!target) return + if (window.electron?.openPath) { + const result = await window.electron.openPath(target) + if (!result.ok && result.error) alert(result.error) + return + } + alert(`Đã xuất file tại:\n${target}`) + } + + return ( + +
+
+ Nối toàn bộ video cảnh thành file xuất cuối bằng FFmpeg. + Cần tạo video cho các cảnh trước khi xuất. +
+
+ Export folder: {defaults.exportRootDir ? {defaults.exportRootDir} : 'chưa cấu hình trong Cài đặt → Chung'} +
+ + {/* Orientation */} +
+
Tỉ lệ
+ {ORIENTATIONS.map(o => ( + + ))} +
+ + {/* Options */} +
+
Tùy chọn
+ + + {fitNarrator && ( +
+ Đệm narrator (giây) + setNarratorBuffer(Math.max(0, Number(e.target.value) || 0))} + className="input" + style={{ maxWidth: 100 }} + /> +
+ )} + +
+ + {error &&
{error}
} + + {status === 'exporting' && ( +
+
+ Đang nối video... có thể mất vài phút +
+ )} + + {status === 'done' && ( +
+ ✓ Xuất file thành công! + {outputPath && {outputPath}} + {exportDir && ( + <> + + Tài nguyên: {exportDir} + + + Đã xuất {exportedImages} ảnh · {exportedVideos} video cảnh + + + )} + + Mở thư mục + +
+ )} + +
+ Hủy + + Xuất video + +
+
+ + ) +} diff --git a/desktop/src/components/pipeline/FixUUIDsModal.tsx b/desktop/src/components/pipeline/FixUUIDsModal.tsx new file mode 100644 index 0000000..546858d --- /dev/null +++ b/desktop/src/components/pipeline/FixUUIDsModal.tsx @@ -0,0 +1,169 @@ +/** + * FixUUIDsModal + * Scans project entities and scenes for non-UUID media_ids (CAMS... format) + * and repairs them by extracting UUID from the corresponding URL. + * Corresponds to CLI skill: fk:fix-uuids + */ +import { useState } from 'react' +import { Wrench, CheckCircle, AlertTriangle } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI, patchAPI } from '../../api/client' +import { normalizeOrientation } from '../../lib/orientation' + +interface Fix { + resource: string + field: string + old: string + new: string + status: 'fixed' | 'error' + error?: string +} + +interface Props { + projectId: string + videoId: string + orientation?: string + onClose: () => void +} + +const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i + +function isUUID(v: string | null | undefined): boolean { + return !!v && UUID_RE.test(v) +} + +function extractUUIDFromUrl(url: string | null | undefined): string | null { + if (!url) return null + // Google Storage: /image/{UUID}?... or /{type}/{UUID}?... + const m = url.match(/\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/i) + return m ? 
m[1] : null +} + +export default function FixUUIDsModal({ projectId, videoId, orientation, onClose }: Props) { + const [running, setRunning] = useState(false) + const [fixes, setFixes] = useState([]) + const [scanned, setScanned] = useState(false) + const [error, setError] = useState('') + + const run = async () => { + setRunning(true); setError(''); setFixes([]) + const newFixes: Fix[] = [] + + try { + // ── Scan entities ────────────────────────────────────── + const chars = await fetchAPI(`/api/projects/${projectId}/characters`) + for (const c of chars) { + if (!isUUID(c.media_id) && c.media_id) { + const extracted = extractUUIDFromUrl(c.reference_image_url) + if (extracted) { + let status: 'fixed' | 'error' = 'fixed' + let errMsg: string | undefined + try { + await patchAPI(`/api/characters/${c.id}`, { media_id: extracted }) + } catch (e: any) { status = 'error'; errMsg = e.message } + newFixes.push({ resource: `Character: ${c.name}`, field: 'media_id', old: c.media_id, new: extracted, status, error: errMsg }) + } + } + } + + // ── Scan scenes ──────────────────────────────────────── + const scenes = await fetchAPI(`/api/scenes?video_id=${videoId}`) + const primary = normalizeOrientation(orientation) === 'HORIZONTAL' ? 'horizontal' : 'vertical' + const prefixes: Array<'vertical' | 'horizontal'> = primary === 'vertical' + ? 
['vertical', 'horizontal'] + : ['horizontal', 'vertical'] + const FIELDS: [string, string][] = prefixes.flatMap((ori) => ([ + [`${ori}_image_media_id`, `${ori}_image_url`], + [`${ori}_video_media_id`, `${ori}_video_url`], + [`${ori}_upscale_media_id`, `${ori}_upscale_url`], + ])) + + for (const scene of scenes) { + for (const [field, urlField] of FIELDS) { + const val = scene[field] + if (!isUUID(val) && val) { + const extracted = extractUUIDFromUrl(scene[urlField]) + if (extracted) { + let status: 'fixed' | 'error' = 'fixed' + let errMsg: string | undefined + try { + await patchAPI(`/api/scenes/${scene.id}`, { [field]: extracted }) + } catch (e: any) { status = 'error'; errMsg = e.message } + newFixes.push({ resource: `Scene #${scene.display_order + 1}`, field, old: val, new: extracted, status, error: errMsg }) + } + } + } + } + + setFixes(newFixes) + setScanned(true) + } catch (err: any) { setError(err.message) } + finally { setRunning(false) } + } + + const fixed = fixes.filter(f => f.status === 'fixed').length + const errors = fixes.filter(f => f.status === 'error').length + + return ( + +
+
+ Scans all character and scene media IDs for invalid CAMS... format and repairs them + by extracting the correct UUID from the stored URL. +
+ + + {running ? 'Scanning...' : scanned ? 'Re-scan & Fix' : 'Scan & Fix All'} + + + {error &&
{error}
} + + {scanned && !running && ( + fixes.length === 0 ? ( +
+ All media IDs are already UUID format. No fixes needed. +
+ ) : ( +
+
+ ✓ {fixed} fixed + {errors > 0 && ✗ {errors} errors} +
+
+ + + + + + + + + + + {fixes.map((f, i) => ( + + + + + + + ))} + +
ResourceFieldNew UUIDStatus
{f.resource}{f.field.replace('vertical_', '').replace('horizontal_', '').replace('_media_id', '')}{f.new.slice(0, 8)}... + {f.status === 'fixed' + ? ✓ Fixed + : ✗ Error} +
+
+
+ ) + )} + +
+ Close +
+
+
+ ) +} diff --git a/desktop/src/components/pipeline/GenNarratorModal.tsx b/desktop/src/components/pipeline/GenNarratorModal.tsx new file mode 100644 index 0000000..c834a6d --- /dev/null +++ b/desktop/src/components/pipeline/GenNarratorModal.tsx @@ -0,0 +1,231 @@ +/** + * GenNarratorModal + * Uses AI (configurable provider) to write narrator_text for all scenes + * from the video's story/scene prompts, then PATCHes each scene. + * Corresponds to CLI skill: fk:gen-narrator + */ +import { useState } from 'react' +import { FileText, RefreshCw, CheckCircle } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI, patchAPI } from '../../api/client' +import { aiGenerate, loadGeneralSettings, type ProviderType } from '../../api/ai-service' + +const PROVIDERS: { id: ProviderType; label: string }[] = [ + { id: 'gemini', label: 'Gemini' }, + { id: 'claude', label: 'Claude' }, + { id: 'openai', label: 'OpenAI' }, + { id: 'deepseek', label: 'DeepSeek' }, +] + +interface Scene { + id: string + display_order: number + prompt: string | null + video_prompt: string | null + narrator_text: string | null + character_names: string[] | null +} + +interface SceneResult { + scene: Scene + status: 'pending' | 'generating' | 'done' | 'skipped' | 'error' + text: string + error?: string +} + +interface Props { + videoId: string + projectId: string + onClose: () => void +} + +const SYSTEM_PROMPT = `You are a documentary narrator writer. +Generate compelling, concise narrator text for a scene. +Respond with a JSON object: {"narrator_text": "..."} +Keep it to 1-3 short sentences max. +Never describe the visual — narrate the meaning, context, or emotion.` + +async function generateNarratorText( + scene: Scene, + language: string, + projectStory: string, + videoStory: string, + provider: ProviderType +): Promise { + const lang = language === 'vi' + ? 'Vietnamese' + : language === 'en' + ? 'English' + : language === 'es' + ? 
'Spanish' + : language + const characterText = scene.character_names?.join(', ') || 'none' + const prompt = `Write narrator text in ${lang} for this documentary scene. + +Scene #${scene.display_order + 1} +Image prompt: ${scene.prompt ?? '(none)'} +Video motion: ${scene.video_prompt ?? '(none)'} +Characters: ${characterText} + +Project storyline context (first 600 chars): ${projectStory.slice(0, 600) || '(none)'} +Episode/video context (first 500 chars): ${videoStory.slice(0, 500) || '(none)'} + +Return JSON: {"narrator_text": "Your narrator text here"}` + + const result = await aiGenerate<{ narrator_text: string }>(prompt, SYSTEM_PROMPT, provider) + return result.narrator_text ?? '' +} + +export default function GenNarratorModal({ videoId, projectId, onClose }: Props) { + const defaults = loadGeneralSettings() + const [provider, setProvider] = useState(defaults.defaultProvider) + const [language, setLanguage] = useState(defaults.defaultLanguage) + const [forceOverwrite, setForceOverwrite] = useState(false) + const [results, setResults] = useState([]) + const [running, setRunning] = useState(false) + const run = async () => { + setRunning(true) + try { + const [scenes, video, project] = await Promise.all([ + fetchAPI(`/api/scenes?video_id=${videoId}`), + fetchAPI(`/api/videos/${videoId}`), + fetchAPI(`/api/projects/${projectId}`), + ]) + const projectStory = ((project?.story ?? project?.description ?? '') as string).trim() + const videoStory = (video?.description ?? '').trim() + + const initial: SceneResult[] = scenes.map(s => ({ + scene: s, + status: 'pending', + text: s.narrator_text ?? '', + })) + setResults(initial) + + for (let i = 0; i < scenes.length; i++) { + const scene = scenes[i] + // Skip if already has text and not force + const isInterview = (scene.character_names ?? []).some(name => name.toLowerCase().includes('interview')) + if (isInterview) { + setResults(prev => prev.map((r, idx) => idx === i ? 
{ ...r, status: 'skipped', text: '(interview scene — skipped)' } : r)) + continue + } + if (scene.narrator_text && !forceOverwrite) { + setResults(prev => prev.map((r, idx) => idx === i ? { ...r, status: 'skipped', text: scene.narrator_text! } : r)) + continue + } + + setResults(prev => prev.map((r, idx) => idx === i ? { ...r, status: 'generating' } : r)) + try { + const text = await generateNarratorText(scene, language, projectStory, videoStory, provider) + await patchAPI(`/api/scenes/${scene.id}`, { narrator_text: text }) + setResults(prev => prev.map((r, idx) => idx === i ? { ...r, status: 'done', text } : r)) + } catch (err: any) { + setResults(prev => prev.map((r, idx) => idx === i ? { ...r, status: 'error', error: err.message } : r)) + } + } + } finally { setRunning(false) } + } + + const done = results.filter(r => r.status === 'done').length + const skipped = results.filter(r => r.status === 'skipped').length + const errors = results.filter(r => r.status === 'error').length + + return ( + +
+
+ AI writes narrator_text for each scene from scene prompts + story context. + Interview scenes are automatically skipped. +
+ + {/* Config */} +
+
+ + +
+
+ + +
+
+ +
+
+ + {results.length === 0 && ( + + Generate All Narrator Text + + )} + + {/* Progress summary */} + {results.length > 0 && ( +
+ ✓ {done} done + ⏭ {skipped} skipped + {errors > 0 && ✗ {errors} error} + {running && ⏳ generating...} +
+ )} + + {/* Scene results */} + {results.length > 0 && ( +
+ {results.map((r, i) => ( +
+ #{i + 1} +
+ {r.status === 'generating' && ( +
+ Generating... +
+ )} + {r.status === 'done' && ( + <> +
+ Done +
+
{r.text}
+ + )} + {r.status === 'skipped' && ( +
⏭ {r.text || 'Skipped (has text)'}
+ )} + {r.status === 'error' && ( +
✗ {r.error}
+ )} + {r.status === 'pending' && ( +
Waiting...
+ )} +
+
+ ))} +
+ )} + + {!running && results.length > 0 && ( +
+ + Re-run + + Done +
+ )} +
+
+ ) +} diff --git a/desktop/src/components/pipeline/GuideModal.tsx b/desktop/src/components/pipeline/GuideModal.tsx new file mode 100644 index 0000000..3a71185 --- /dev/null +++ b/desktop/src/components/pipeline/GuideModal.tsx @@ -0,0 +1,103 @@ +import Modal from '../ui/Modal' + +interface Props { + guide: 'camera' | 'thumbnail' + onClose: () => void +} + +const CAMERA_GUIDE = { + title: 'Camera Guide (fk:camera-guide)', + sections: [ + { + heading: 'Prompt Structure', + points: [ + 'Giữ prompt 100-150 từ, dạng prose tự nhiên.', + 'Câu camera movement tách riêng khỏi câu action.', + 'Kết prompt bằng Audio/SFX/Music + Negative: subtitles, watermark, text overlay.', + 'Với project dùng reference image, scene prompt tập trung ACTION + SETTING, không mô tả lại ngoại hình nhân vật.', + ], + }, + { + heading: 'Shot & Movement', + points: [ + 'Shot: EWS, WS, MS, CU, ECU, Macro.', + 'Movement: dolly in/out, pan, tilt, tracking, crane, handheld, whip pan, arc, POV, static.', + 'Mỗi shot nên có 1 movement chính để model bám sát.', + ], + }, + { + heading: 'Lighting & Style', + points: [ + 'Luôn chỉ định lighting: golden hour, low-key, backlight, volumetric, noir…', + 'Giữ style nhất quán theo sequence (color grade, lens, tone).', + 'Multi-shot 8s: tối ưu 2-3 góc, tránh nhồi quá nhiều cut.', + ], + }, + { + heading: 'Audio Labels', + points: [ + 'Audio: ambient liên tục (mưa, gió, phố).', + 'SFX: âm thanh sự kiện (footsteps, door slam, gun cock).', + 'Dialogue ngắn, tối đa 10-15 từ/nhân vật/2-3s segment.', + ], + }, + ], +} + +const THUMBNAIL_GUIDE = { + title: 'Thumbnail Guide (fk:thumbnail-guide)', + sections: [ + { + heading: 'Core Rules', + points: [ + 'Text 0-3 từ, chữ dày, đặt ở nửa trên ảnh.', + 'Gương mặt/subject chiếm 30-60% frame, cảm xúc mạnh.', + 'Màu tương phản cao, tránh palette nhạt xám.', + 'Một focal point rõ ràng, background đơn giản.', + ], + }, + { + heading: '6 Formula Gợi Ý', + points: [ + 'Reaction Face', + 'Before/After Split', + 'Contrast/Clash (small 
vs massive)', + 'Mystery/Reveal', + 'High Stakes Frame', + 'Number Punch', + ], + }, + { + heading: 'Technical', + points: [ + 'Kích thước chuẩn YouTube: 1280x720 (16:9).', + 'Safe zone: vùng trung tâm 90%.', + 'Tránh góc phải dưới vì YouTube đặt duration badge.', + 'Luôn test readability ở kích thước mobile.', + ], + }, + ], +} + +export default function GuideModal({ guide, onClose }: Props) { + const content = guide === 'camera' ? CAMERA_GUIDE : THUMBNAIL_GUIDE + + return ( + +
+ {content.sections.map(section => ( +
+
+ {section.heading} +
+
+ {section.points.map(point => ( +
• {point}
+ ))} +
+
+ ))} +
+
+ ) +} diff --git a/desktop/src/components/pipeline/MusicModal.tsx b/desktop/src/components/pipeline/MusicModal.tsx new file mode 100644 index 0000000..d30ffa2 --- /dev/null +++ b/desktop/src/components/pipeline/MusicModal.tsx @@ -0,0 +1,338 @@ +import { useEffect, useMemo, useState } from 'react' +import { Download, Music, RefreshCw, Scissors, VolumeX, Waves } from 'lucide-react' +import Modal from '../ui/Modal' +import ActionButton from '../ui/ActionButton' +import { fetchAPI } from '../../api/client' + +interface Props { + videoId: string + projectId: string + onClose: () => void +} + +interface TemplateItem { + id: string + name: string + category?: string + description?: string +} + +interface TaskClip { + id: string + title?: string + audioUrl?: string + audio_url?: string + duration?: number +} + +const MODELS = ['V4', 'V4_5', 'V4_5PLUS', 'V4_5ALL', 'V5', 'V5_5'] + +function extractClips(task: any): TaskClip[] { + const response = task?.response ?? task?.task?.response ?? {} + const clips = response?.sunoData || response?.data || task?.clips || [] + return Array.isArray(clips) ? clips : [] +} + +function extractLyrics(task: any): string { + const response = task?.response ?? task?.task?.response ?? task ?? 
{} + const candidates = [ + response?.lyrics, + response?.lyric, + response?.text, + response?.content, + response?.data?.lyrics, + response?.data?.text, + ] + for (const c of candidates) { + if (typeof c === 'string' && c.trim()) return c.trim() + } + return '' +} + +export default function MusicModal({ projectId, onClose }: Props) { + const [templates, setTemplates] = useState([]) + const [templateId, setTemplateId] = useState('') + + const [prompt, setPrompt] = useState('') + const [style, setStyle] = useState('') + const [title, setTitle] = useState('') + const [customMode, setCustomMode] = useState(true) + const [instrumental, setInstrumental] = useState(true) + const [model, setModel] = useState('V4') + + const [taskId, setTaskId] = useState('') + const [task, setTask] = useState(null) + const [loading, setLoading] = useState(false) + const [error, setError] = useState('') + + const [extendPrompt, setExtendPrompt] = useState('') + const [continueAt, setContinueAt] = useState('') + + const [credits, setCredits] = useState(null) + const [lyricsText, setLyricsText] = useState('') + + const clips = useMemo(() => extractClips(task), [task]) + + const loadMeta = async () => { + const [tpls, cre] = await Promise.all([ + fetchAPI('/api/music/templates').catch(() => []), + fetchAPI('/api/music/credits').catch(() => null), + ]) + setTemplates(tpls) + setCredits(cre) + if (!templateId && tpls[0]) setTemplateId(tpls[0].id) + } + + useEffect(() => { + loadMeta().catch(() => { }) + }, []) + + const refreshTask = async (id = taskId) => { + if (!id) return + const t = await fetchAPI(`/api/music/tasks/${id}`) + setTask(t) + setTaskId(id) + setLyricsText(extractLyrics(t)) + } + + const generate = async () => { + setLoading(true) + setError('') + setTask(null) + try { + const r = await fetchAPI<{ task_id: string; task?: any }>('/api/music/generate', { + method: 'POST', + body: JSON.stringify({ + prompt: prompt.trim(), + style: style.trim(), + title: title.trim(), + 
instrumental, + model, + custom_mode: customMode, + template_id: templateId || undefined, + poll: false, + }), + }) + setTaskId(r.task_id) + await refreshTask(r.task_id) + } catch (e: any) { + setError(e.message ?? 'Tạo nhạc thất bại') + } finally { + setLoading(false) + } + } + + const generateLyricsOnly = async () => { + if (!prompt.trim()) return + setLoading(true) + setError('') + setTask(null) + setLyricsText('') + try { + const r = await fetchAPI<{ task_id: string }>('/api/music/generate-lyrics', { + method: 'POST', + body: JSON.stringify({ + prompt: prompt.trim(), + template_id: templateId || undefined, + poll: false, + }), + }) + setTaskId(r.task_id) + await refreshTask(r.task_id) + } catch (e: any) { + setError(e.message ?? 'Tạo lời thất bại') + } finally { + setLoading(false) + } + } + + const pollUntilDone = async () => { + if (!taskId) return + setLoading(true) + setError('') + try { + const r = await fetchAPI(`/api/music/tasks/${taskId}/poll`, { method: 'POST' }) + setTask(r) + } catch (e: any) { + setError(e.message ?? 'Lấy trạng thái thất bại') + } finally { + setLoading(false) + } + } + + const downloadAll = async () => { + if (!taskId) return + setLoading(true) + setError('') + try { + const r = await fetchAPI(`/api/music/tasks/${taskId}/download?project_id=${projectId}`, { method: 'POST' }) + const n = r?.downloaded?.length ?? 0 + setError('') + alert(`Đã tải ${n} clip vào thư mục nhạc của dự án.`) + } catch (e: any) { + setError(e.message ?? 'Tải xuống thất bại') + } finally { + setLoading(false) + } + } + + const extendClip = async (audioId: string) => { + setLoading(true) + setError('') + try { + const r = await fetchAPI<{ task_id: string }>('/api/music/extend', { + method: 'POST', + body: JSON.stringify({ + audio_id: audioId, + prompt: extendPrompt.trim(), + continue_at: continueAt.trim() ? 
Number(continueAt) : undefined, + model, + poll: false, + }), + }) + setTaskId(r.task_id) + await refreshTask(r.task_id) + } catch (e: any) { + setError(e.message ?? 'Mở rộng clip thất bại') + } finally { + setLoading(false) + } + } + + const vocalRemoval = async (audioId: string) => { + if (!taskId) return + setLoading(true) + setError('') + try { + const r = await fetchAPI<{ task_id: string }>('/api/music/vocal-removal', { + method: 'POST', + body: JSON.stringify({ task_id: taskId, audio_id: audioId, poll: false }), + }) + setTaskId(r.task_id) + await refreshTask(r.task_id) + } catch (e: any) { + setError(e.message ?? 'Tách vocal thất bại') + } finally { + setLoading(false) + } + } + + const convertWav = async (audioId: string) => { + if (!taskId) return + setLoading(true) + setError('') + try { + const r = await fetchAPI<{ task_id: string }>('/api/music/convert-to-wav', { + method: 'POST', + body: JSON.stringify({ task_id: taskId, audio_id: audioId, poll: false }), + }) + setTaskId(r.task_id) + await refreshTask(r.task_id) + } catch (e: any) { + setError(e.message ?? 'Chuyển WAV thất bại') + } finally { + setLoading(false) + } + } + + return ( + +
+
+ Tác vụ: {taskId || '-'} + Trạng thái: {task?.status ?? '-'} + {credits && Credits: {JSON.stringify(credits)}} +
+ +
+
+
Mẫu / Model
+ + + + +
+ +
+
Tạo nhạc
+ setTitle(e.target.value)} placeholder="Tiêu đề" /> + setStyle(e.target.value)} placeholder="Tag phong cách (tùy chọn)" /> +