From 11ed713062d60524f3a168ad62de4be9672a8ddb Mon Sep 17 00:00:00 2001 From: Khaliq Date: Tue, 12 May 2026 11:46:02 +0200 Subject: [PATCH 1/4] =?UTF-8?q?feat(deploy):=20ship=20workforce=20deploy?= =?UTF-8?q?=20v1=20=E2=80=94=20persona-as-deployable-agent?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extends PersonaSpec with deploy-time fields (cloud, useSubscription, integrations, schedules, sandbox, memory, traits, onEvent) and adds two new packages plus a CLI surface so a persona JSON deploys as a runnable agent end-to-end. Highlights - persona-kit: 8 new optional PersonaSpec fields, full parser coverage, KNOWN_TRIGGERS registry + lintTriggers helper (151 tests). - @agentworkforce/runtime: handler() wrapper, WorkforceCtx + WorkforceEvent discriminated union, gateway-envelope shim, startRunner driving NDJSON stdin, real GithubClient with retryable-status classification (15 tests). - @agentworkforce/deploy: orchestrator with preflight/connect/bundle/ launch steps, esbuild-driven bundle stager, child_process dev launcher with stdin piping + SIGTERM/SIGKILL stop, Daytona sandbox launcher (BYO DAYTONA_API_KEY), env-aware integration resolver (13 tests). - CLI: `workforce deploy` + `workforce login` cases wired into the existing dispatcher, foreground+detach handling for --mode dev. - examples/weekly-digest: real cron-driven persona that searches Brave, clusters by host, upserts a weekly GitHub issue. E2E verified: `workforce deploy ./examples/weekly-digest/persona.json --mode dev` builds the bundle, spawns the runner, dispatches an envelope on stdin, and streams structured logs back through the CLI. Plans authored: - docs/plans/deploy-v1.md (product plan) - docs/plans/deploy-v1-codex-spec.md (codex agent tasks) - docs/plans/deploy-v1-workflow-spec.md (Ricky cross-repo workflow) 361 tests pass across the 7 workspace packages. --mode cloud is the only explicitly-deferred surface (gated on the workforce-cloud deployments endpoint). Co-Authored-By: Claude Opus 4.7 (1M context) --- .gitignore | 2 + docs/plans/deploy-v1-codex-spec.md | 373 +++ docs/plans/deploy-v1-workflow-spec.md | 470 ++++ docs/plans/deploy-v1.md | 622 +++++ examples/weekly-digest/README.md | 55 + examples/weekly-digest/agent.ts | 228 ++ examples/weekly-digest/persona.json | 50 + packages/cli/package.json | 1 + packages/cli/src/cli.ts | 9 + packages/cli/src/deploy-command.ts | 170 ++ packages/deploy/package.json | 40 + packages/deploy/src/bundle.test.ts | 112 + packages/deploy/src/bundle.ts | 153 ++ packages/deploy/src/connect.ts | 196 ++ packages/deploy/src/deploy.test.ts | 344 +++ packages/deploy/src/deploy.ts | 215 ++ packages/deploy/src/index.ts | 31 + packages/deploy/src/io.ts | 84 + packages/deploy/src/login.ts | 42 + packages/deploy/src/modes/cloud.ts | 28 + packages/deploy/src/modes/dev.ts | 166 ++ packages/deploy/src/modes/sandbox.ts | 175 ++ packages/deploy/src/preflight.ts | 107 + packages/deploy/src/types.ts | 138 + packages/deploy/tsconfig.json | 8 + packages/persona-kit/package.json | 3 +- packages/persona-kit/src/index.ts | 27 + packages/persona-kit/src/parse.test.ts | 236 +- packages/persona-kit/src/parse.ts | 359 ++- packages/persona-kit/src/triggers.test.ts | 93 + packages/persona-kit/src/triggers.ts | 95 + packages/persona-kit/src/types.ts | 151 ++ packages/runtime/package.json | 50 + packages/runtime/src/clients/errors.ts | 35 + packages/runtime/src/clients/github.test.ts | 143 + packages/runtime/src/clients/github.ts | 232 ++ packages/runtime/src/clients/index.ts | 14 + packages/runtime/src/ctx.ts | 119 + packages/runtime/src/handler.ts | 43 + packages/runtime/src/index.ts | 59 + packages/runtime/src/raw.ts | 18 + packages/runtime/src/runner.test.ts | 138 + packages/runtime/src/runner.ts | 209 ++ packages/runtime/src/shim.test.ts | 91 + packages/runtime/src/shim.ts | 101 + packages/runtime/src/types.ts | 222 ++ packages/runtime/tsconfig.json | 8 + pnpm-lock.yaml | 2673 ++++++++++++++++++- 48 files changed, 8917 insertions(+), 21 deletions(-) create mode 100644 docs/plans/deploy-v1-codex-spec.md create mode 100644 docs/plans/deploy-v1-workflow-spec.md create mode 100644 docs/plans/deploy-v1.md create mode 100644 examples/weekly-digest/README.md create mode 100644 examples/weekly-digest/agent.ts create mode 100644 examples/weekly-digest/persona.json create mode 100644 packages/cli/src/deploy-command.ts create mode 100644 packages/deploy/package.json create mode 100644 packages/deploy/src/bundle.test.ts create mode 100644 packages/deploy/src/bundle.ts create mode 100644 packages/deploy/src/connect.ts create mode 100644 packages/deploy/src/deploy.test.ts create mode 100644 packages/deploy/src/deploy.ts create mode 100644 packages/deploy/src/index.ts create mode 100644 packages/deploy/src/io.ts create mode 100644 packages/deploy/src/login.ts create mode 100644 packages/deploy/src/modes/cloud.ts create mode 100644 packages/deploy/src/modes/dev.ts create mode 100644 packages/deploy/src/modes/sandbox.ts create mode 100644 packages/deploy/src/preflight.ts create mode 100644 packages/deploy/src/types.ts create mode 100644 packages/deploy/tsconfig.json create mode 100644 packages/persona-kit/src/triggers.test.ts create mode 100644 packages/persona-kit/src/triggers.ts create mode 100644 packages/runtime/package.json create mode 100644 packages/runtime/src/clients/errors.ts create mode 100644 packages/runtime/src/clients/github.test.ts create mode 100644 packages/runtime/src/clients/github.ts create mode 100644 packages/runtime/src/clients/index.ts create mode 100644 packages/runtime/src/ctx.ts create mode 100644 packages/runtime/src/handler.ts create mode 100644 packages/runtime/src/index.ts create mode 100644 packages/runtime/src/raw.ts create mode 100644 packages/runtime/src/runner.test.ts create mode 100644 packages/runtime/src/runner.ts create mode 100644 packages/runtime/src/shim.test.ts create mode 100644 packages/runtime/src/shim.ts create mode 100644 packages/runtime/src/types.ts create mode 100644 packages/runtime/tsconfig.json diff --git a/.gitignore b/.gitignore index 572e981..2001269 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ packages/*/dist/ *.tsbuildinfo .npm-cache .agent-relay +# workforce deploy bundle output (staged per-persona by `workforce deploy`) +**/.workforce/ diff --git a/docs/plans/deploy-v1-codex-spec.md b/docs/plans/deploy-v1-codex-spec.md new file mode 100644 index 0000000..fda0958 --- /dev/null +++ b/docs/plans/deploy-v1-codex-spec.md @@ -0,0 +1,373 @@ +# Codex agent spec — `workforce deploy` v1 + +You are implementing the parallelizable, mechanical pieces of the `workforce deploy` v1 feature. The product plan lives at `docs/plans/deploy-v1.md` — read it first, top to bottom, for context. **This file is your contract**: do exactly what it specifies, in the listed order of priority, opening one PR per numbered task against the `workforce` repo. + +You are working in parallel with a human engineer who owns the schema diff in `persona-kit`, the `@agentworkforce/runtime` core (`handler()` + ctx builder + shim), the deploy orchestrator's main flow, and the CLI dispatch case. **Treat their files as published interfaces — do not modify them.** If something you need is missing, leave a `TODO(human): exposed surface needed — ` comment and skip ahead. + +## Working agreement + +- **Branch per task.** One branch per numbered task. Naming: `codex/deploy-v1-` (e.g. `codex/deploy-v1-github-client`). +- **PR per task.** Title format: `feat(): `. Body links back to this spec section. +- **No schema decisions.** If a persona JSON field is ambiguous, re-read §3 of `deploy-v1.md`. If still ambiguous, surface in PR body — do not invent. +- **TypeScript only.** ESM modules. `"type": "module"`. Match existing workforce package style (`packages/persona-kit/package.json` is the reference). +- **No new dependencies without justification.** Prefer extending existing imports. If you need a new dep, state why in the PR body. +- **Tests required.** Each new file must have a `*.test.ts` next to it covering happy path + one error case. Use the existing test runner — check `packages/persona-kit` for the pattern. +- **Run `corepack pnpm run check` before declaring a task done.** If it's red, fix it before opening the PR. + +## Interfaces you can rely on (published by the human) + +By the time you start, these will exist: + +```ts +// from @agentworkforce/persona-kit (extended schema) +import type { PersonaSpec, IntegrationConfig, Schedule, SandboxConfig, MemoryConfig, Traits } from '@agentworkforce/persona-kit'; + +// from @agentworkforce/runtime (core) +import { handler, type WorkforceCtx, type WorkforceEvent, type IntegrationClients } from '@agentworkforce/runtime'; +import { buildCtx, type CtxBuildOptions } from '@agentworkforce/runtime/internal'; // internal subpath +``` + +If any of these aren't exported yet when you reach for them, leave the `TODO(human)` comment described above and move on. + +--- + +## Task 1 — Per-integration clients (HIGHEST PRIORITY) + +**Goal:** Concrete TS clients for each Relayfile provider, exposed on `WorkforceCtx` as `ctx.github`, `ctx.linear`, etc. + +**Files to create:** +- `packages/runtime/src/clients/github.ts` +- `packages/runtime/src/clients/linear.ts` +- `packages/runtime/src/clients/slack.ts` +- `packages/runtime/src/clients/notion.ts` +- `packages/runtime/src/clients/jira.ts` +- `packages/runtime/src/clients/index.ts` (barrel) +- `packages/runtime/src/clients/.test.ts` for each + +**Per-client contract:** + +```ts +export interface GithubClient { + comment(target: { owner: string; repo: string; number: number }, body: string): Promise<{ id: string; url: string }>; + createIssue(args: { owner: string; repo: string; title: string; body: string; labels?: string[] }): Promise<{ number: number; url: string }>; + upsertIssue(args: { owner: string; repo: string; title: string; body: string; labels?: string[]; matchTitle: string }): Promise<{ number: number; url: string; created: boolean }>; + getPr(target: { owner: string; repo: string; number: number }): Promise<{ title: string; body: string; diff: string; head: string; base: string; author: string }>; + postReview(target: { owner: string; repo: string; number: number }, args: { body: string; event: 'COMMENT' | 'APPROVE' | 'REQUEST_CHANGES'; comments?: Array<{ path: string; line: number; body: string }> }): Promise; +} + +export function createGithubClient(opts: { + connectionId: string; // from Relayfile + relayfileBaseUrl: string; +}): GithubClient; +``` + +Mirror this shape for the other providers. Method coverage per provider: + +| Provider | Methods (v1) | +|---|---| +| `github` | as above | +| `linear` | `createIssue`, `updateIssue`, `comment`, `getIssue` | +| `slack` | `post(channel, text)`, `reply(threadTs, text)`, `dm(user, text)` | +| `notion` | `createPage(parent, properties, content)`, `updatePage`, `getPage` | +| `jira` | `createIssue`, `comment`, `transition` | + +**Implementation pattern:** +- Auth: each call sends `Authorization: Bearer `; the connection-id resolves to a token at request time via Relayfile's `/api/v1/connections/:id/token` (check `@relayfile/sdk` for the exact helper — `RelayfileSetup` likely has one). +- Errors: throw `WorkforceIntegrationError` (define in `packages/runtime/src/errors.ts`) with `provider`, `operation`, `cause`, `retryable` fields. +- Don't implement retries here — the runtime's outer loop handles it. Just throw with `retryable: true` for 5xx and 429s. +- No SDK heavy lifting — use `fetch`. Each client should be <150 lines. + +**Acceptance:** +- All five client files compile and pass their tests. +- `WorkforceCtx`'s per-integration fields are populated by `ctx.ts` when the persona declares that integration. (The human owns `ctx.ts`; you just expose `createXxxClient` so it can call them.) + +**Effort:** ~2–3h total across all five (~30min each). + +--- + +## Task 2 — Bundle stager (`bundle.ts`) + +**Goal:** Pure file-staging function the deploy orchestrator calls to produce a runnable bundle in `.workforce/build//`. + +**File to create:** `packages/deploy/src/bundle.ts` (+ test) + +**Contract:** + +```ts +export interface BundleInput { + personaPath: string; // absolute path to the persona JSON + persona: PersonaSpec; // already-parsed + outDir: string; // .workforce/build/ + bundlerOptions?: { minify?: boolean }; +} + +export interface BundleResult { + personaCopyPath: string; // outDir/persona.json + runnerPath: string; // outDir/runner.mjs (entry) + bundlePath: string; // outDir/agent.bundle.mjs (esbuild'd agent.ts) + packageJsonPath: string; + sizeBytes: number; +} + +export async function stageBundle(input: BundleInput): Promise; +``` + +**What it does:** +1. Resolve `persona.onEvent` relative to `personaPath`. Verify file exists. +2. Esbuild the `onEvent` file as ESM bundle → `outDir/agent.bundle.mjs`. Bundle target `node20`, format `esm`, platform `node`, sourcemap `inline`, external all `node:*` plus `@agentworkforce/runtime/raw`. +3. Copy `persona.json` (the parsed object stringified) to `outDir/persona.json`. +4. Generate `outDir/runner.mjs` from this exact template: + ```js + import { startRunner } from '@agentworkforce/runtime/runner'; + import persona from './persona.json' assert { type: 'json' }; + import * as agentModule from './agent.bundle.mjs'; + const handler = agentModule.default ?? agentModule.handler; + startRunner({ persona, handler }); + ``` +5. Write `outDir/package.json` listing `@agentworkforce/runtime` at the workspace version. +6. Return `BundleResult` with byte size. + +**Dependencies allowed:** `esbuild`, `node:fs/promises`, `node:path`. Nothing else. + +**Acceptance:** +- Given `examples/weekly-digest/persona.json`, `stageBundle` produces a working bundle whose `runner.mjs` runs under `node runner.mjs` (assuming `@agentworkforce/runtime/runner` exists — if not, leave the runner template and TODO). +- Idempotent: running twice cleans and rewrites. +- Tested with a fixture persona under `packages/deploy/src/__fixtures__/`. + +**Effort:** ~1h. + +--- + +## Task 3 — `modes/dev.ts` — local long-lived runner + +**Goal:** Spawn the bundled runner as a child Node process, stream logs to stdout, hold the parent process open until SIGINT. + +**File to create:** `packages/deploy/src/modes/dev.ts` (+ test) + +**Contract:** + +```ts +export interface DevRunInput { + bundle: BundleResult; + env?: Record; + onLog?: (line: string) => void; +} + +export interface DevRunHandle { + pid: number; + stop(): Promise; + done: Promise<{ code: number; signal: NodeJS.Signals | null }>; +} + +export async function runDev(input: DevRunInput): Promise; +``` + +**Implementation:** +- Use `node:child_process.spawn('node', [bundle.runnerPath], { stdio: ['ignore', 'pipe', 'pipe'], env: { ...process.env, ...input.env } })`. +- Line-buffer stdout/stderr (avoid partial lines). Prefix each line with `[runtime]` before forwarding to `onLog` or `console.log` default. +- `stop()` sends SIGTERM, escalates to SIGKILL after 5s. +- Forward SIGINT on the parent: kill the child cleanly. +- `done` resolves on child exit. + +**Acceptance:** A unit test using a stub `runner.mjs` that prints "hello" and exits — verifies the log line is captured and `done` resolves with `code: 0`. + +**Effort:** ~45min. + +--- + +## Task 4 — `modes/sandbox.ts` — Daytona launcher + +**Goal:** Same shape as `runDev`, but launches inside a Daytona sandbox. + +**File to create:** `packages/deploy/src/modes/sandbox.ts` (+ test using a Daytona mock) + +**Contract:** + +```ts +export interface SandboxRunInput { + bundle: BundleResult; + sandboxConfig: SandboxConfig | true; + env?: Record; + onLog?: (line: string) => void; + daytona: { apiKey: string; jwtToken?: string; organizationId?: string }; +} + +export interface SandboxRunHandle { + sandboxId: string; + stop(): Promise; + done: Promise<{ code: number }>; +} + +export async function runSandbox(input: SandboxRunInput): Promise; +``` + +**Implementation:** +- `import { Daytona } from '@daytonaio/sdk'` (already used in `cloud/packages/core/src/runtime/daytona.ts` — same SDK). +- `new Daytona({ apiKey })`; `daytona.create({ language: 'typescript', envVars: input.env })`. +- `sandbox.fs.uploadFiles([...])` — upload the entire bundle directory recursively. +- `sandbox.process.executeCommand('node runner.mjs', '/home/user/project', input.env, input.sandboxConfig?.timeoutSeconds ?? 1800)`. +- Stream output. Daytona's `executeCommand` is final-result-only; for log streaming use `sandbox.process.createSession()` + `executeSessionCommand` with the streaming variant if available. If not (check SDK version), fall back to polling `executeCommand` output every 2s. Note the gap in the PR body. +- `stop()` calls `sandbox.delete()`. + +**Acceptance:** Test with a mocked `@daytonaio/sdk` that simulates create + exec + delete; assert call sequence. + +**Effort:** ~1.5h (Daytona SDK surface verification is most of the time). + +--- + +## Task 5 — Examples + +### 5a. `examples/weekly-digest/` + +**Files:** `examples/weekly-digest/persona.json`, `examples/weekly-digest/agent.ts`, `examples/weekly-digest/README.md`. + +**persona.json:** Use the shape from `deploy-v1.md` §7.1. `id: "weekly-digest"`. Schedule `0 9 * * 6`. GitHub integration only. Memory enabled with `workspace` scope. No sandbox config (defaults on). + +**agent.ts:** Default-export a `handler(...)`. On `event.source === 'cron'`: +1. Fetch search results from Brave Search API (env: `BRAVE_API_KEY`) for the topics in `persona.inputs.TOPICS` (define this input with a default list of 3 topics). +2. Dedupe + cluster by URL host. +3. Upsert one GitHub issue per week titled `Weekly digest — `, body listing clustered findings. Use `ctx.github.upsertIssue` with `matchTitle`. +4. Save a memory note: "digest published for week N" with `tags: ['digest', 'week:']`. + +Aim for ~80 lines. Keep it readable. + +### 5b. `examples/review-agent/` + +**Files:** `examples/review-agent/persona.json`, `examples/review-agent/agent.ts`, `examples/review-agent/README.md`. + +**persona.json:** From §7.2 of the plan. GitHub + Slack integrations. `useSubscription: true`. Memory enabled. Traits set. + +**agent.ts:** Dispatches on `event.source` + `event.type`: +- `github.pull_request.opened` → `ctx.harness.run({ prompt: \`Review this PR:\n${diff}\`, cwd: ctx.sandbox.cwd })` → `ctx.github.postReview(target, { event: 'COMMENT', body: result.output })`. +- `github.issue_comment.created` (matched as `@mention`) → reply with `ctx.github.comment` using harness output with thread context. +- `github.pull_request_review_comment.created` → similar reply. +- `github.check_run.completed` w/ failure → harness with the failed CI logs, post a comment with the proposed fix. +- `slack.app_mention` → conversational reply via memory + harness. Use `ctx.slack.reply`. + +Aim for ~120 lines. + +**Acceptance:** +- Both `agent.ts` files typecheck against `@agentworkforce/runtime`. +- Both `persona.json` parse via `parsePersonaSpec` without errors. +- Both READMEs document setup (which integrations to connect first, which env vars to set). + +**Effort:** ~1h for both. + +--- + +## Task 6 — Trigger registry expansion + +**File:** `packages/persona-kit/src/triggers.ts` (the human creates a stub; you fill it in). + +**Source of truth:** `/Users/khaliqgant/Projects/AgentWorkforce/relayfile/docs/` and the Relayfile adapter packages under `/Users/khaliqgant/Projects/AgentWorkforce/relayfile-adapters/`. + +For each Tier-1 provider (`github`, `linear`, `slack`, `notion`, `jira`), enumerate every event name the adapter normalizes. Output: + +```ts +export const KNOWN_TRIGGERS = { + github: ['pull_request.opened', 'pull_request.closed', /* ... */] as const, + linear: ['issue.created', /* ... */] as const, + // ... +} as const satisfies Record; + +export type ProviderName = keyof typeof KNOWN_TRIGGERS; +export type TriggerOf

= (typeof KNOWN_TRIGGERS)[P][number]; +``` + +Also expose a `lintTriggers(persona: PersonaSpec): TriggerLintIssue[]` function that returns warnings for unknown trigger names (don't throw). + +**Acceptance:** Each provider has ≥8 trigger names. `lintTriggers` returns `[]` for the two examples shipped in Task 5. + +**Effort:** ~45min. + +--- + +## Task 7 — JSON Schema export + persona fixtures + +**Files:** +- `packages/persona-kit/scripts/emit-schema.mjs` — emits JSON Schema for the extended `PersonaSpec` (use `ts-json-schema-generator` or `typescript-json-schema`; pick whichever has fewer transitive deps). +- `packages/persona-kit/schemas/persona.schema.json` — generated artifact, checked in. +- `packages/persona-kit/src/__fixtures__/personas/minimal.json` — bare persona (no cloud fields). +- `packages/persona-kit/src/__fixtures__/personas/cron-only.json` — cloud + schedules, no integrations. +- `packages/persona-kit/src/__fixtures__/personas/full.json` — every optional field populated. +- `packages/persona-kit/src/__fixtures__/personas/invalid-unknown-trigger.json` — should produce a lint warning. +- `packages/persona-kit/scripts/emit-schema.test.ts` — round-trips each fixture through the schema. + +Wire `emit-schema.mjs` to run as part of `pnpm run build` (or `prebuild`). + +**Acceptance:** Each fixture validates against the emitted schema. The script is idempotent. + +**Effort:** ~1h. + +--- + +## Task 8 — `examples/linear-shipper/` (paraglide pattern) + +**Files:** `examples/linear-shipper/persona.json`, `examples/linear-shipper/agent.ts`, `examples/linear-shipper/README.md`. + +**persona.json:** Headless (no traits). `cloud: true`. Linear + GitHub integrations. Trigger: `linear.issue.created`. Sandbox on. + +**agent.ts:** On `linear.issue.created`: +1. Pull issue body via `ctx.linear.getIssue`. +2. Clone the target repo into `ctx.sandbox.cwd` via `ctx.sandbox.exec('git clone ...')`. +3. `ctx.harness.run({ prompt: \`Implement this Linear issue:\n${issue.body}\`, cwd: ctx.sandbox.cwd })`. +4. Open a draft PR via `ctx.github.createIssue` (or `createPr` if exposed — add to GithubClient if needed and update Task 1's contract via a TODO comment). +5. Comment back on the Linear issue with the PR link via `ctx.linear.comment`. + +Aim for ~100 lines. + +**Acceptance:** persona.json parses; agent.ts typechecks. + +**Effort:** ~45min. + +--- + +## Task 9 — README rewrite + +**File:** `README.md` at the workforce repo root. + +Lead with the deploy story: "A persona is a deployable agent." Show `workforce deploy ./review-agent.json` as the headline example. Demote the existing local-CLI usage section to "Local agents" further down. + +Sections: +1. Quick start: `workforce deploy ./examples/weekly-digest/persona.json` +2. What a persona looks like (short JSON snippet) +3. Run modes (`--dev`, `--sandbox`, `--cloud`) +4. Integrations supported +5. Local agents (existing content) +6. Personas as packages (existing content) + +Keep marketing language minimal. Match the existing voice. + +**Acceptance:** Renders cleanly on GitHub. Links to the examples and to `docs/plans/deploy-v1.md`. + +**Effort:** ~30min. + +--- + +## Suggested execution order + +If you have one agent: 1 → 6 → 7 → 2 → 3 → 4 → 5 → 8 → 9. + +If you parallelize across multiple codex agents: +- Track A (independent): Task 1 (github + linear first, then slack/notion/jira). +- Track B (independent): Task 6, Task 7. +- Track C (depends on A's github+linear): Task 5, Task 8. +- Track D (depends on bundle.ts contract being agreed — read it from this spec): Task 2, Task 3, Task 4 sequentially. +- Track E (last): Task 9, after Tasks 1–5 are merged. + +Each track is its own PR series. No track waits on another's review. + +## When you are blocked + +- **Missing exported symbol from `@agentworkforce/runtime`?** Leave `TODO(human): need ` in code + flag in PR body. Don't speculate. +- **Disagreement with the plan?** Open a comment thread on the PR for `deploy-v1.md` — don't unilaterally change the contract. +- **Test failing for a reason you can't isolate?** Skip it with `it.skip(..., 'TODO(human): ')` and ship the rest of the task. Don't block a track on a flake. + +## Out of scope for you (the human owns these) + +- Schema types in `packages/persona-kit/src/types.ts` and the parser in `parse.ts`. +- `@agentworkforce/runtime` core: `handler()`, `WorkforceCtx`, `WorkforceEvent`, ctx builder, the `@agent-relay/agent` shim. +- `@agentworkforce/deploy` orchestrator entry (`index.ts`). +- CLI dispatch case in `packages/cli/src/cli.ts`. +- `workforce login` helper. +- This plan and spec files. diff --git a/docs/plans/deploy-v1-workflow-spec.md b/docs/plans/deploy-v1-workflow-spec.md new file mode 100644 index 0000000..09f26ac --- /dev/null +++ b/docs/plans/deploy-v1-workflow-spec.md @@ -0,0 +1,470 @@ +# Ricky workflow spec — `workforce deploy` v1 cross-repo work + +**Status:** ready for Ricky to generate + run a workflow. +**Resolves:** the cross-repo + cloud-side pieces of `workforce deploy` v1. +**Companion docs:** +- `/Users/khaliqgant/Projects/AgentWorkforce/workforce/docs/plans/deploy-v1.md` (product plan — read this first for context) +- `/Users/khaliqgant/Projects/AgentWorkforce/workforce/docs/plans/deploy-v1-codex-spec.md` (parallel codex agent's tasks — do not duplicate) +**Reference workflow (shape to mirror):** `/Users/khaliqgant/Projects/AgentWorkforce/cloud-proactive-runtime-spec/workflows/proactive-runtime-m1.ts` + +--- + +## How to consume this spec + +This file is self-contained. Ricky should generate one workflow TS file (suggested name `workforce-deploy-v1.ts`) that orchestrates **all four ready-now tracks** below in parallel, with the proactive-runtime-m1 conventions (preflight → implementer → self-reflection → soft/hard gates → commit → push → draft PR per track, plus a cross-repo integration test track). + +Two tracks are blocked and should be encoded as **separate** workflow files Ricky runs later (M3 and M6). Do not include them in the v1 workflow. + +### Run command (final workflow) + +```sh +npx tsx workflows/workforce-deploy-v1.ts +``` + +### Required env (repo paths) + +Resolve these as env vars with the defaults shown. Ricky should set these to absolute paths inside its sandbox. + +``` +HOME=/Users/khaliqgant +ROOT=$HOME/Projects/AgentWorkforce + +CLOUD_REPO=$ROOT/cloud +WORKFORCE_REPO=$ROOT/workforce +AGENT_ASSISTANT_REPO=$ROOT/agent-assistant +RELAYFILE_REPO=$ROOT/relayfile # read-only reference +RELAY_REPO=$ROOT/relay # read-only reference +``` + +### Required secrets + +``` +DAYTONA_API_KEY # for cloud-side endpoint smoke test +GITHUB_TOKEN # for opening PRs +WORKFORCE_E2E_STAGING_TOKEN # set in CI for the E2E job; not needed for the workflow itself +``` + +### Coordination shape + +Hub-spoke / Conversation. A lead Claude Opus stays on `#wf-workforce-deploy-v1` as architect + ambient reviewer; codex implementers work tracks in parallel and iterate based on lead feedback. The workforce repo has a human engineer (separately) landing the runtime core, schema diff, deploy orchestrator entry, and CLI dispatch case — **do not touch those files; consume them as published interfaces.** + +### Never-fail mechanics (mirror proactive-runtime-m1) + +- Every test / typecheck / regression gate runs as soft → fixer → hard. +- Two review rounds: peer review → signoff → router → fix-r2 → final signoff. If round 2 still has gaps, every PR opens as **DRAFT** with the gap list templated into the body. Workflow exits 0. +- Global `onError`: retry, 2 retries, 10s backoff. +- Self-checks built in: + - Per-track self-reflection vs the relevant track section below. + - Per-track self-review via soft/hard gate loop. + - Lead does ambient peer review during implementation. + - Reviewer agent does formal peer review off the cross-repo diff. + - Signoff agent verifies the v1 acceptance contract end-to-end. + - Router routes back to fixer if signoff is INCOMPLETE. + +### Branching, worktree, and PR conventions + +**One branch per track.** Names listed inline per track below. Base on `origin/main` at workflow start. + +**Worktrees are required when two or more tracks share a repo.** In this workflow: +- `$CLOUD_REPO` is used by Track A only → no worktree needed; operate in place on the branch. +- `$WORKFORCE_REPO` is shared by Tracks B, C, and INT → **each track operates in its own git worktree**. Path conventions: + + ``` + $WORKFORCE_REPO # Track B (consume Daytona) → branch feat/deploy-v1-daytona-consume + $WORKFORCE_REPO.wt-mcp # Track C (MCP server) → branch feat/mcp-workforce + $WORKFORCE_REPO.wt-e2e # Track INT (E2E test) → branch feat/deploy-v1-e2e + ``` + + Create with: + ```sh + cd $WORKFORCE_REPO + git fetch origin main + git worktree add $WORKFORCE_REPO.wt-mcp -b feat/mcp-workforce origin/main + git worktree add $WORKFORCE_REPO.wt-e2e -b feat/deploy-v1-e2e origin/main + ``` + + Each track's preflight command must `cd` into its worktree path and operate exclusively there. **Never `cd` into another track's worktree.** The workflow generator should encode the worktree path as a per-track constant, not as a mutable variable. + + **Cleanup:** Ricky **never** runs `git worktree remove`. Worktrees stay on disk until a human prunes them after PR merge. This keeps draft PRs reviewable against their own snapshot. + +**Preflight allow-list pattern.** Each track defines its allowed-dirty regex (the set of files Ricky may find dirty from a previous run). Anything outside the allow-list fails preflight to prevent clobbering in-flight human work. Mirror `proactive-runtime-m1.ts:115-122`. Suggested allow-lists are documented per track. + +**PR conventions (all tracks):** +- **Always open as DRAFT.** Even on green. Human flips to ready after review. +- **Base branch:** `main` in the target repo. +- **Title format:** `():

` — e.g. `feat(daytona-runner): extract DaytonaRuntime into publishable package`. Use scope = package name when adding/changing a package. +- **Body template:** + ```markdown + ## Summary + + + ## Spec + Resolves part of: `workforce/docs/plans/deploy-v1.md` + Track: in `workforce/docs/plans/deploy-v1-workflow-spec.md` + + ## Sibling PRs + - + + ## Acceptance + - [ ] + + ## Gaps (if any) + + + 🤖 Generated by Ricky workflow `workforce-deploy-v1` + ``` +- **No `--no-verify`.** Pre-commit hooks must pass. If they fail, fix the issue and create a new commit; do not amend or bypass. +- **Co-author trailer:** include `Co-Authored-By: via Ricky ` on each commit Ricky generates. + +**Cross-track dependencies:** +- Track B pins `@workforce/daytona-runner` to Track A's pre-release tag or branch. Ricky must wait for Track A to push at least one commit + tag before Track B's implementer starts. Encode as a `dependsOn` edge in the workflow DAG. +- Track INT waits for A, B, C all to be merged (or marked ready-for-review with green CI). Encode as a `dependsOn` edge. + +### Acceptance contract (workflow-level) + +After all four ready-now tracks complete, the following must be true: + +1. `@workforce/daytona-runner` is published to npm and importable from `workforce`. +2. `POST /api/v1/workspaces/:id/sandboxes` returns a usable sandbox handle when called with a workspace token. +3. `@agentworkforce/mcp-workforce` is published and a smoke test proves the harness can call `memory.save` + `memory.recall` round-trip. +4. The cross-repo integration test (Track INT) passes: a `weekly-digest` example deploys via `--mode sandbox`, fires a cron tick, posts to a fixture GitHub repo within 60s. + +--- + +## Tracks (ready now — include in `workforce-deploy-v1.ts`) + +Each track has its own preflight, install, implementer (codex/claude with model below), self-reflection, soft/hard gates (test + typecheck), and ends with commit + push + draft PR. + +### Track A — `@workforce/daytona-runner` package extraction + sandbox issuance endpoint + +**Repo:** `$CLOUD_REPO` (no worktree — single track per repo, operate in place) +**Implementer model:** codex (high reasoning). +**Working branch:** `feat/workforce-daytona-runner` +**Allowed-dirty regex:** `package-lock\.json|package\.json|pnpm-lock\.yaml|pnpm-workspace\.yaml|packages/daytona-runner/.*|packages/core/src/runtime/.*|packages/core/src/auth/.*|packages/web/app/api/v1/workspaces/.*sandboxes.*` +**PR title:** `feat(daytona-runner): extract DaytonaRuntime into publishable package + workforce sandbox endpoint` +**Rationale:** workforce's `--mode sandbox` consumes Daytona. The wrapper at `cloud/packages/core/src/runtime/daytona.ts` is battle-tested; extracting it into a publishable package is cleaner than copying. The sandbox issuance endpoint (~30 lines) means workforce users don't need their own Daytona account. + +**Preflight:** +- Verify `$CLOUD_REPO` is a valid clone, `main` is up to date. +- Verify `corepack pnpm install` runs clean. + +**Implementation steps:** + +1. Create `cloud/packages/daytona-runner/` as a publishable package named `@workforce/daytona-runner`. + - `package.json`: ESM, type: module, `"main": "./dist/index.js"`, `"types": "./dist/index.d.ts"`. Workspace version-aligned with rest of cloud. + - Source layout: + ``` + packages/daytona-runner/src/ + runtime.ts # the DaytonaRuntime class, moved verbatim + auth.ts # resolveDaytonaAuthCredentials, moved verbatim + types.ts # ExecOptions, ExecResult, LaunchOptions, RuntimeCapabilities, RuntimeHandle, WorkflowRuntime + index.ts # barrel exporting public surface + runtime.test.ts # smoke test (skipped without DAYTONA_API_KEY) + ``` + - Public exports from `index.ts`: + ```ts + export { DaytonaRuntime } from './runtime.js'; + export { resolveDaytonaAuthCredentials } from './auth.js'; + export type { RuntimeHandle, LaunchOptions, ExecOptions, ExecResult, RuntimeCapabilities, WorkflowRuntime } from './types.js'; + ``` + +2. Move (don't copy) `DaytonaRuntime` and its types out of `cloud/packages/core/src/runtime/daytona.ts` and `cloud/packages/core/src/auth/credentials.ts` into the new package. + +3. Re-export from the old locations to keep cloud's existing imports working: + ```ts + // cloud/packages/core/src/runtime/daytona.ts (now a re-export shim) + export { DaytonaRuntime, type RuntimeHandle, type LaunchOptions, type ExecOptions, type ExecResult } from '@workforce/daytona-runner'; + ``` + +4. Update `cloud/pnpm-workspace.yaml` to include the new package. + +5. Add an integration test in `packages/daytona-runner/src/runtime.test.ts`: + - Skips when `DAYTONA_API_KEY` is absent. + - When present: creates a sandbox, runs `node -e 'console.log("ok")'`, asserts output, destroys. + - Use the existing test runner / vitest config from cloud. + +6. Add `POST /api/v1/workspaces/:id/sandboxes` to the cloud API. Auth: workspace token (same primitive as other workspace-scoped endpoints — find it in `cloud/packages/web/app/api/v1/workspaces/` and mirror the pattern). + - Request body: + ```ts + { purpose: 'workforce-deploy'; personaId: string; label?: string; env?: Record; timeoutSeconds?: number } + ``` + - Handler: + ```ts + const auth = resolveDaytonaAuthCredentials({ + apiKey: process.env.DAYTONA_API_KEY, + jwtToken: process.env.DAYTONA_JWT_TOKEN, + organizationId: process.env.DAYTONA_ORGANIZATION_ID, + }); + const daytona = new Daytona(auth); + const sandbox = await daytona.create({ language: 'typescript', name: body.label, envVars: body.env }); + return { sandboxId: sandbox.id, jwtToken: , organizationId: auth.organizationId, expiresAt }; + ``` + - If the Daytona SDK cannot mint a per-sandbox JWT, ship instead as a proxy endpoint pair: + - `POST /api/v1/workspaces/:id/sandboxes/:sandboxId/exec` body `{ command, cwd?, env?, timeoutSeconds? }` + - `PUT /api/v1/workspaces/:id/sandboxes/:sandboxId/files` body `{ entries: Array<{ source: base64; destination: string }> }` + - Workforce CLI uses these via a thin "remote Daytona" client. Note the trade in the PR body. + - Audit-log every sandbox creation with `workspaceId`, `personaId`, `sandboxId`, `requester`. Use the existing audit-log primitive in cloud. + - Add `DELETE /api/v1/workspaces/:id/sandboxes/:sandboxId` for explicit teardown. + +7. Run the cloud test suite + typecheck. Soft → fixer → hard. + +8. Open draft PR. Body links to this spec + the parallel workforce-side consumption PR (Track B). + +**Acceptance:** +- New package compiles, lints, tests pass. +- `cloud`'s existing workflows still build (re-export shim is transparent). +- The sandbox endpoint returns a working handle in the integration test (when `DAYTONA_API_KEY` is set). +- PR is open as draft, body links spec. + +**Effort estimate:** ~3.5h. + +--- + +### Track B — workforce consumes `@workforce/daytona-runner` + sandbox endpoint + +**Repo:** `$WORKFORCE_REPO` (operate in place — this track owns the primary checkout; C and INT use worktrees) +**Implementer model:** codex (medium reasoning). +**Working branch:** `feat/deploy-v1-daytona-consume` +**Allowed-dirty regex:** `package\.json|pnpm-lock\.yaml|packages/deploy/.*|packages/cli/src/cli\.ts|examples/.*/README\.md` +**PR title:** `feat(deploy): use @workforce/daytona-runner + workforce-managed sandbox issuance` +**Depends on:** Track A's PR is in flight (does not need to be merged — Ricky pins to the branch / pre-release). + +**Preflight:** +- Verify `$WORKFORCE_REPO` is on a branch that contains the codex agent's `packages/deploy/src/modes/sandbox.ts` (from `deploy-v1-codex-spec.md` Task 4). If not present yet, **block this track** until that file lands; do not stub. + +**Implementation steps:** + +1. Add `@workforce/daytona-runner` to `packages/deploy/package.json` deps (pin to Track A's branch or pre-release tag). + +2. Rewrite `packages/deploy/src/modes/sandbox.ts` to use `DaytonaRuntime`: + ```ts + import { DaytonaRuntime } from '@workforce/daytona-runner'; + import { Daytona } from '@daytonaio/sdk'; + import { resolveDaytonaAuth } from '../daytona-auth.js'; + + export async function runSandbox(input: SandboxRunInput): Promise { + const auth = await resolveDaytonaAuth(); + const daytona = new Daytona(auth); + const runtime = new DaytonaRuntime({ daytona }); + const handle = await runtime.launch({ env: input.env, label: input.bundle.personaCopyPath }); + // upload bundle files… + // exec(`node runner.mjs`)… + return { sandboxId: handle.id, stop: () => runtime.destroy(handle), done }; + } + ``` + - Public function signature must match what the codex agent shipped (no changes to callers). + - **Do not** call `runtime.destroy(handle)` automatically when `exec` returns — workforce agents are long-lived. Only destroy on explicit `stop()`. + +3. Create `packages/deploy/src/daytona-auth.ts`: + ```ts + export interface DaytonaAuth { apiKey?: string; jwtToken?: string; organizationId?: string; } + + export async function resolveDaytonaAuth(): Promise { + if (process.env.DAYTONA_API_KEY) return { apiKey: process.env.DAYTONA_API_KEY }; + // workforce-managed path + const workforceCloudUrl = process.env.WORKFORCE_CLOUD_URL ?? 'https://cloud.agentworkforce.com'; + const workspaceToken = await loadWorkspaceToken(); // from keychain via existing workforce login flow + const res = await fetch(`${workforceCloudUrl}/api/v1/workspaces/${workspace.id}/sandboxes`, { + method: 'POST', + headers: { authorization: `Bearer ${workspaceToken}`, 'content-type': 'application/json' }, + body: JSON.stringify({ purpose: 'workforce-deploy', personaId, label, env, timeoutSeconds }), + }); + if (!res.ok) throw new Error(`sandbox issuance failed: ${res.status} ${await res.text()}`); + const { jwtToken, organizationId } = await res.json(); + return { jwtToken, organizationId }; + } + ``` + - `loadWorkspaceToken` lives in workforce's login module owned by the human engineer; if it isn't yet exported, leave `TODO(human): need loadWorkspaceToken export` and inline-stub with `process.env.WORKFORCE_WORKSPACE_TOKEN`. + +4. Add a `--byo-sandbox` CLI flag (in `packages/cli/src/cli.ts`'s deploy case) that forces BYO even when logged in. Mirrors `--no-connect` style. + +5. Update `examples/weekly-digest/README.md` and `examples/review-agent/README.md` to document both paths: + - BYO: `export DAYTONA_API_KEY=...` + - Workforce-managed: `workforce login` (no Daytona env needed) + +6. Run `corepack pnpm run check`. Soft → fixer → hard. + +7. Open draft PR. Body links to this spec + Track A's PR. + +**Acceptance:** +- `workforce deploy ./examples/weekly-digest/persona.json --mode sandbox` works in both auth paths (verify with a manual run if creds are available; otherwise stop at typecheck-green). +- Tests pass. +- PR open as draft. + +**Effort estimate:** ~1.5h. + +--- + +### Track C — `@agentworkforce/mcp-workforce` MCP server + +**Repo:** `$WORKFORCE_REPO.wt-mcp` (worktree — create with `git worktree add $WORKFORCE_REPO.wt-mcp -b feat/mcp-workforce origin/main` before this track starts) +**Implementer model:** codex (high reasoning). +**Working branch:** `feat/mcp-workforce` +**Allowed-dirty regex:** `package\.json|pnpm-lock\.yaml|pnpm-workspace\.yaml|packages/mcp-workforce/.*` +**PR title:** `feat(mcp-workforce): MCP server bridging harnesses to workforce primitives` +**Rationale:** when a persona's `onEvent` calls `ctx.harness.run(...)`, the harness (Claude Code / Codex / opencode) spawns inside the sandbox and needs tool access to workforce primitives (workflow.run, memory.save/recall, integration RPCs). MCP is the canonical contract. + +**Preflight:** +- Verify `$WORKFORCE_REPO` has `packages/runtime/src/clients/` populated. If not (codex Task 1 not merged yet), implement the **server skeleton + memory + workflow tools** now; **leave integration tools as TODO** with a placeholder that throws `not yet wired`. Open the PR anyway as draft. +- Verify `$AGENT_ASSISTANT_REPO` exposes `@agent-assistant/memory` (it does today — see `agent-assistant/packages/memory/src/memory.ts`). + +**Implementation steps:** + +1. Create `packages/mcp-workforce/` package. + - `package.json`: depends on `@modelcontextprotocol/sdk`, `@agent-assistant/memory`, `@agentworkforce/runtime` (workspace), `@agentworkforce/persona-kit` (workspace). + - Bin entry: `npx @agentworkforce/mcp-workforce` resolves to `dist/server.js`. + +2. Source layout: + ``` + packages/mcp-workforce/src/ + server.ts # MCP stdio server entry + tools/ + workflow.ts # workflow.run, workflow.status + memory.ts # memory.save, memory.recall + integrations.ts # integration.. + config.ts # reads WORKFORCE_PERSONA_PATH, WORKFORCE_RUNTIME_TOKEN, WORKFORCE_WORKSPACE_ID + index.ts + *.test.ts # one per tool file + ``` + +3. Server skeleton: use `@modelcontextprotocol/sdk` stdio transport. At startup, read env (`WORKFORCE_PERSONA_PATH`, `WORKFORCE_RUNTIME_TOKEN`, `WORKFORCE_WORKSPACE_ID`) and register the tool set below. The runtime sets these when spawning the harness via `ctx.harness.run`. + +4. Tools: + + | Tool | Args (Zod) | Returns | Backed by | + |---|---|---|---| + | `workflow.run` | `{ name: string; args: Record }` | `{ runId, status, output? }` | HTTP POST to `${WORKFORCE_CLOUD_URL}/api/v1/workflows/run` | + | `workflow.status` | `{ runId: string }` | `{ status, output?, error? }` | HTTP GET | + | `memory.save` | `{ content: string; tags?: string[]; scope?: 'session'\|'user'\|'workspace' }` | `{ ok: true }` | `@agent-assistant/memory` writeMemory | + | `memory.recall` | `{ query: string; limit?: number }` | `{ items: MemoryItem[] }` | `@agent-assistant/memory` query | + | `integration..` | varies | varies | delegates to `@agentworkforce/runtime/clients` (or throws "not yet wired" if codex Task 1 isn't merged) | + +5. Tests: + - Unit tests per tool, mocking the underlying memory adapter / integration clients. + - One integration test: spin up the server, send `memory.save` then `memory.recall`, assert round-trip. Skip when `SUPERMEMORY_API_KEY` is absent. + +6. Document the persona-side wiring in `packages/mcp-workforce/README.md`: + ```jsonc + // The workforce runtime injects this automatically when ctx.harness.run is called. + // Personas do not need to declare it — but for power users, it's: + "mcpServers": { + "workforce": { "command": "npx", "args": ["@agentworkforce/mcp-workforce"] } + } + ``` + +7. Run `corepack pnpm run check`. Soft → fixer → hard. + +8. Open draft PR. + +**Acceptance:** +- Package compiles, tests pass. +- Memory round-trip integration test passes when `SUPERMEMORY_API_KEY` is set. +- Workflow tools wired (HTTP calls smoke-tested against staging). +- Integration tools wired if codex Task 1 merged; otherwise TODO with clear error message. +- PR open as draft. + +**Effort estimate:** ~4h. + +--- + +### Track INT — Cross-repo integration test + +**Repo:** `$WORKFORCE_REPO.wt-e2e` (worktree — create with `git worktree add $WORKFORCE_REPO.wt-e2e -b feat/deploy-v1-e2e origin/main` before this track starts) +**Implementer model:** codex (medium reasoning). +**Working branch:** `feat/deploy-v1-e2e` +**Allowed-dirty regex:** `package\.json|pnpm-lock\.yaml|packages/deploy/test/.*|\.github/workflows/deploy-e2e\.yml|docs/plans/deploy-v1-e2e-fixtures\.md` +**PR title:** `test(deploy): cross-repo E2E for weekly-digest + review-agent` +**Depends on:** Tracks A, B, C, plus codex spec Tasks 1–5 merged. + +**Preflight:** +- Verify all dependencies have landed. If not, **block** this track; do not run until ready. +- Confirm fixture credentials are present: `WORKFORCE_E2E_STAGING_TOKEN`, GitHub PAT for `AgentWorkforce/deploy-e2e-fixtures`, Linear API key for the staging project. + +**Implementation steps:** + +1. Add `packages/deploy/test/e2e/` directory. Use vitest with a 5-minute test timeout. + +2. Test 1 — weekly-digest `--mode dev`: + - `deploy(persona, { mode: 'dev', noConnect: false })` against staging workspace. + - Simulate a `cron.tick` via the runtime's test hook (or wait up to 60s for the next tick if the schedule is dense in tests). + - Assert a GitHub issue exists on `AgentWorkforce/deploy-e2e-fixtures` with the expected title pattern. + - Cleanup: close the issue. + +3. Test 2 — review-agent `--mode sandbox`: + - `deploy(persona, { mode: 'sandbox' })` against staging workspace. + - Open a PR via the GitHub API on the fixture repo. + - Assert the agent posts a review within 90s. + - Cleanup: close the PR + destroy the sandbox via `DELETE /api/v1/workspaces/:id/sandboxes/:id`. + +4. Test 3 (only if codex spec Task 8 — `linear-shipper` example — is merged): + - Create a Linear issue via the API. + - Assert the agent clones + harness-runs + opens a PR + comments back on the Linear issue. + +5. Each test cleans up after itself. + +6. Add `.github/workflows/deploy-e2e.yml` running `pnpm run test:e2e` on a nightly schedule + manual dispatch. Failures notify `#workforce-alerts`. + +7. Document fixture setup in `docs/plans/deploy-v1-e2e-fixtures.md`: which repo, which Linear project, which Slack workspace. + +**Acceptance:** +- All applicable tests pass once. +- Nightly CI job is green. +- Fixture-setup doc committed. + +**Effort estimate:** ~5h. + +--- + +## Tracks (blocked — separate workflow files when unblocked) + +Do **not** include these in `workforce-deploy-v1.ts`. Encode each as its own file under `workflows/` when the blocker clears. + +### Track CLOUD — `--cloud` deploy mode wiring (blocked on cloud proactive-runtime M4) + +**Why blocked:** cloud proactive-runtime M4 is the milestone that adds `POST /api/v1/workspaces/:id/deployments` (the "accept a persona bundle, host it" endpoint). Until M4 lands, workforce's `--mode cloud` flag prints "not yet available." + +**Workflow filename when unblocked:** `workflows/workforce-deploy-cloud-mode.ts`. + +**Repos touched when unblocked:** +- `$CLOUD_REPO` — implement the deployments endpoint per M4 spec; reuse Durable Object infra from M1's agent-gateway. +- `$WORKFORCE_REPO` — replace `packages/deploy/src/modes/cloud.ts` stub with real POST + status polling. + +**Acceptance when unblocked:** +- `workforce deploy ./examples/weekly-digest/persona.json --mode cloud` produces a hosted agent that fires from cloud, not the user's machine. +- `workforce deployments list` and `workforce deployments destroy ` work. + +### Track BILL — Billing meter for workforce-managed sandboxes (post-v1) + +**Why deferred:** the sandbox endpoint ships in Track A with audit logging. Wiring the audit log into the workspace billing meter is mechanical but needs platform-team alignment on meter naming. + +**Workflow filename when scheduled:** `workflows/workforce-deploy-billing.ts`. + +**Repo touched:** `$CLOUD_REPO` only. + +**Acceptance:** +- Sandbox minutes appear on the workspace billing dashboard. + +### Track DOCS — Documentation site updates (after codex tasks 6/7/9 + human schema diff merge) + +**Why deferred:** docs lift from the JSON Schema export (codex Task 7), trigger registry (codex Task 6), and README (codex Task 9). They must merge first. + +**Workflow filename when scheduled:** `workflows/workforce-deploy-docs.ts`. + +**Repo touched:** the AgentWorkforce docs site (resolve `$DOCS_REPO` when the workflow runs). + +**Acceptance:** +- Concept doc, quickstart, persona schema reference, per-provider trigger reference, and runtime handler API page are live. + +--- + +## Out of scope for Ricky + +- Anything inside `$WORKFORCE_REPO/packages/persona-kit`, `packages/runtime` core, `packages/deploy/src/index.ts` entry, or `packages/cli/src/cli.ts` — the human engineer owns these. Consume as published interfaces. +- The codex agent's tasks (`docs/plans/deploy-v1-codex-spec.md`) — that agent runs separately. +- `deploy-v1.md`, this spec, and the codex spec — do not modify the plans. + +## When Ricky is blocked + +- **Daytona SDK can't mint per-sandbox JWTs?** Pivot Track A part B to the proxy-endpoint variant described in the spec. Note the trade in the PR body. Do not block. +- **A codex Track 1 interface doesn't match Track C's expectations?** Open a comment thread on the codex Task 1 PR. Stub against the documented contract in `deploy-v1-codex-spec.md`. Open Track C PR as draft with the diff in the body. +- **Cloud test suite flake?** Use the soft → fixer → hard gate loop; if it flakes twice, quarantine the test, file a follow-up issue, exit 0 with draft PR. diff --git a/docs/plans/deploy-v1.md b/docs/plans/deploy-v1.md new file mode 100644 index 0000000..1da14e0 --- /dev/null +++ b/docs/plans/deploy-v1.md @@ -0,0 +1,622 @@ +# Plan — `workforce deploy` v1 + +Status: draft for review +Owner: workforce +Target: shipped today (Phase 1 vertical slice) +Depends on: cloud proactive-runtime M1 (assumed shipping today), Daytona creds in workforce cloud env + +--- + +## 1. Thesis + +A workforce **persona is a deployable agent**. Today a persona JSON describes how to launch a harness in the user's terminal. After this work, the same persona JSON — with a few new fields — also describes a cloud agent that listens for events (GitHub PR opened, Linear issue created, `@mention`, cron tick), runs inside a Daytona sandbox, can invoke its harness for real work, retains memory, and posts back through Slack/Relaycast/GitHub. + +The user-facing command is one line: + +``` +workforce deploy ./review-agent.json +``` + +That command does everything: validates schema, prompts the user through integration OAuth, registers triggers, bundles the agent code, and starts a long-lived runner. The persona JSON is the single source of truth. + +This unifies three product surfaces that today live as separate things: +- The local persona/harness story (`workforce agent `) +- The sage-style addressable assistant (Slack/Relaycast inbox) +- The proactive-agents-style scheduled/event-driven worker + +One file. One command. One contract. + +--- + +**Companion docs:** +- `workforce/docs/plans/deploy-v1-workflow-spec.md` — Ricky cross-repo execution spec (worktree layout, branch names, PR templates, blocked tracks). +- `workforce/docs/plans/deploy-v1-codex-spec.md` — parallel codex implementer tasks (do not duplicate from this plan). + +--- + +## 2. Scope cut for today + +### In + +- Persona JSON schema extension: `cloud`, `useSubscription`, `integrations`, `schedules`, `sandbox`, `memory`, `traits`, `onEvent`. +- New package `@agentworkforce/runtime` — thin facade exposing `handler(...)` that wraps `agent({...})` from `@agent-relay/agent` (cloud proactive-runtime M1 SDK). +- New package `@agentworkforce/deploy` — the deploy CLI logic; the existing `cli.ts` gets a `deploy` case that dispatches to it. +- Daytona sandbox launcher used in the `--sandbox` run mode. +- Integration connect via `@relayfile/sdk` (`RelayfileSetup.connectIntegration`) and provider connect via `@agent-relay/cloud` (`connectProvider`) when `useSubscription: true`. +- Run modes: + - `--dev` — long-lived local Node process connecting to `agent-gateway` (no sandbox). + - `--sandbox` — Daytona sandbox runs the bundle; default when Daytona creds resolve. + - `--cloud` — POST bundle to a workforce cloud deploy endpoint. **Stubbed today** (endpoint lands in proactive-runtime M4). The flag exists, prints "not yet available; cloud-hosted deploy lands with M4." +- Two reference examples shipped in `examples/`: + - `examples/review-agent/` — GitHub PR review + autofix + - `examples/weekly-digest/` — cron, Brave search → GitHub issue +- `workforce dry-run` extension that validates the new fields and lints the integration trigger names. + +### Out (Phase 2+) + +- Declarative integration routing (`links: [{ from: "github.issue.opened", to: "slack.post" }]`). Linking happens in `onEvent` code for v1. +- Multi-tenant cloud-hosted deploy (blocked on cloud proactive-runtime M4). +- Persona schema for "personas as a service" marketplace metadata (pricing, install count, etc.). +- A web UI for managing deployed agents — CLI only for now. +- Migrating sage / sales / nightcto to this contract — they remain as-is; this is greenfield. + +--- + +## 3. Persona JSON schema diff + +All new fields are optional. A persona that does not set any of them continues to behave exactly as today — `workforce agent ` works unchanged. Set `cloud: true` and at least one trigger to opt into the new deploy surface. + +### 3.1 Top-level additions + +| Field | Type | Required when | Meaning | +|---|---|---|---| +| `cloud` | `boolean` | always (default `false`) | When `true`, this persona is deployable. `workforce deploy` only operates on personas where this is `true`. | +| `useSubscription` | `boolean` | optional | When `true`, inference uses the user's connected LLM subscription via `@agent-relay/cloud`'s provider link (no workforce-billed tokens). Triggers a `connectProvider` step at deploy time. | +| `integrations` | `Record` | when persona has event triggers | Declares which Relayfile providers this agent needs and what events fire its handler. See §3.2. | +| `schedules` | `Schedule[]` | when persona runs on cron | One or more cron triggers, registered with the runtime's `ctx.schedule.every(...)`. Each schedule has a `name` echoed back to the handler. See §3.3. | +| `sandbox` | `boolean \| SandboxConfig` | optional | `true` (default) means agent runs inside a Daytona sandbox. `false` means the runner process owns its own filesystem. Object form lets you tune env / timeout. See §3.4. | +| `memory` | `boolean \| MemoryConfig` | optional | Enables the agent-assistant memory subsystem. Scopes and TTL configurable. See §3.5. | +| `traits` | `Traits` | optional, **only meaningful for interactive agents** | Mirrors `@agent-assistant/traits`: voice, formality, proactivity, etc. Applied when the agent posts to a chat surface (Slack, Relaycast). Headless agents (paraglide-style "Linear issue → ship") may omit this. See §3.6. | +| `onEvent` | `string` | when `cloud: true` and any trigger declared | Path to a TS file (relative to the persona JSON) whose default export is the event handler. Sub-file references like `./agent.ts` and `./handlers/index.ts` are supported. See §4. | + +### 3.2 `integrations` shape + +```jsonc +"integrations": { + "github": { + "scope": { "repo": "AgentWorkforce/workforce" }, // optional; provider-specific filter + "triggers": [ + { "on": "pull_request.opened" }, + { "on": "issue_comment.created", "match": "@mention" }, // match is a sugar lint, see §3.7 + { "on": "pull_request_review_comment.created" }, + { "on": "check_run.completed", "where": "conclusion=failure" } + ] + }, + "linear": { "triggers": [{ "on": "issue.created" }] }, + "slack": { "triggers": [{ "on": "app_mention" }] }, + "notion": { "scope": { "database": "..." }, "triggers": [{ "on": "page.updated" }] } +} +``` + +Key choices: +- **Key is the Relayfile provider slug.** `github`, `linear`, `slack`, `notion`, `jira`. The deploy step calls `RelayfileSetup.connectIntegration({ allowedIntegrations: [key] })` for any provider not yet connected to the user's workspace. +- **`triggers[]` is a flat list per provider** — multiple events from the same provider all fan into the same `onEvent`. The handler discriminates on `event.source` + `event.type`. +- **`match` and `where` are sugars** — `match: "@mention"` is shorthand for "filter to events that mention the deployed agent." The deploy CLI lints them against a known set; unknown values warn but don't fail. We can always upgrade the runtime to enforce them later. +- **`scope` is optional and provider-specific.** Validated by the deploy CLI against a small provider-schema map. For v1, supported keys are documented per provider in the examples. + +The act of stacking integrations is just declaring multiple keys. The act of linking them ("when GitHub fires, post to Slack") is code in `onEvent`. We considered a declarative `links:` block — see §11.4 for why we deferred it. + +### 3.3 `schedules` shape + +```jsonc +"schedules": [ + { "name": "weekly-digest", "cron": "0 9 * * 6", "tz": "UTC" }, + { "name": "stale-prs", "cron": "0 9 * * 1-5", "tz": "America/New_York" } +] +``` + +- `name` is required and unique within the persona; it surfaces as `event.name` to disambiguate inside `onEvent`. +- `cron` is a standard 5-field expression. `tz` defaults to `UTC`. +- Multiple schedules are allowed. The runtime registers each with `ctx.schedule.every(cron, { tz, payload: { name } })`. + +### 3.4 `sandbox` shape + +```jsonc +"sandbox": true // default +"sandbox": { "enabled": true, "timeoutSeconds": 1800, "env": { "FOO": "bar" } } +"sandbox": false // run in the runner process's fs +``` + +- Image is **not** user-configurable in v1. Workforce picks a standard image (`node-22` baseline) for the default Daytona sandbox. We can add `image` later if a real demand surfaces; eliminating the field keeps the v1 contract small. +- `timeoutSeconds` caps a single handler invocation. Default 1800s. +- `env` adds env vars on top of the auto-injected secrets (Relayfile connection tokens, harness inference creds, etc.). +- When `sandbox: false`, the agent's `ctx.sandbox` still exists but points at the runner's own process — useful for `--dev` iteration, **not** what we recommend for production. + +### 3.5 `memory` shape + +```jsonc +"memory": true // sensible defaults +"memory": { + "enabled": true, + "scopes": ["session", "user", "workspace"], + "ttlDays": 30, + "autoPromote": true, + "dedupMs": 300000 +} +``` + +- Implementation: the runtime wires `@agent-assistant/memory` with the supermemory adapter (matching sage today). API key is pulled from workforce-managed env, not declared in the persona. +- `scopes` is the only field with real semantic weight: session-only memory is wiped per handler; user-scope persists across the user's invocations of this agent; workspace persists across all users. +- `autoPromote` flips on the sage turn-recorder pattern — agent decides if session content is worth promoting. +- **No `memoryMd` file.** Memory is config, not prose. Personality goes in `traits` and `description`. + +### 3.6 `traits` shape + +Direct mapping to `@agent-assistant/traits`: + +```jsonc +"traits": { + "voice": "professional-warm", + "formality": "low", + "proactivity": "medium", + "riskPosture": "conservative", + "domain": "engineering", + "vocabulary": ["PR", "diff", "CI"], + "preferMarkdown": true +} +``` + +Only used when the runtime renders into a conversational surface (Slack message, Relaycast post, GitHub PR comment). Skip the field entirely for headless agents — saves the runtime a subsystem registration. + +### 3.7 Trigger-name registry + +`packages/persona-kit/src/triggers.ts` (new) ships a small registry of known trigger names per provider so the deploy CLI can lint them: + +```ts +export const KNOWN_TRIGGERS = { + github: ["pull_request.opened", "pull_request.synchronize", + "issue_comment.created", "pull_request_review_comment.created", + "check_run.completed", "workflow_run.completed", "issues.opened"], + linear: ["issue.created", "issue.updated", "comment.created"], + slack: ["app_mention", "message.channels"], + // ... +} as const; +``` + +Unknown trigger names log a yellow warning but don't fail deploy. The cloud runtime is the source of truth; we don't want to be a gating bottleneck. + +--- + +## 4. Runtime substrate — `@agentworkforce/runtime` + +A new, intentionally thin package. Single export: `handler(...)`. + +```ts +// @agentworkforce/runtime +import { agent } from '@agent-relay/agent'; // PR #515 M1 + +type WorkforceEvent = + | { source: 'cron'; name: string; firedAt: string } + | { source: 'github'; type: GithubTrigger; ...payload } + | { source: 'linear'; type: LinearTrigger; ...payload } + | { source: 'slack'; type: SlackTrigger; ...payload }; + +interface WorkforceCtx { + // Inference, either workforce-billed or via the user's subscription + llm: { complete(prompt: string, opts?: LlmOpts): Promise }; + + // Spawn the persona's declared harness inside the sandbox + harness: { + run(args: { prompt: string; cwd?: string; tier?: 'best'|'best-value'|'minimum' }): Promise; + }; + + // Per-integration auth-wrapped clients (only those declared in persona.integrations) + github?: GithubClient; + linear?: LinearClient; + slack?: SlackClient; + notion?: NotionClient; + jira?: JiraClient; + + // Daytona sandbox (or process fs if sandbox:false) + sandbox: { + cwd: string; // absolute path inside the sandbox + exec(cmd: string, opts?: { cwd?: string; env?: Record }): Promise; + readFile(path: string): Promise; + writeFile(path: string, contents: string): Promise; + }; + + // Memory (agent-assistant memory, wired per persona.memory) + memory: { + save(content: string, opts?: { tags?: string[]; scope?: MemoryScope }): Promise; + recall(query: string, opts?: { limit?: number }): Promise; + }; + + // Workflow invocation — the persona-as-orchestrator escape hatch (§6) + workflow: { + run(name: string, args: Record): Promise; + }; + + // Schedule control — for handlers that want to schedule one-off followups + schedule: { + at(when: Date, payload: unknown): Promise; + cancel(name: string): Promise; + }; + + // Persona metadata (id, traits, harness tier defaults, etc.) — read-only + persona: PersonaSpec; +} + +export function handler( + fn: (ctx: WorkforceCtx & { [k in I]: NonNullable }, event: WorkforceEvent) => Promise +): WorkforceHandler; +``` + +Implementation notes: +- `handler(...)` reads the persona JSON adjacent to the entrypoint (workforce bundles them together). At cold-start it: + 1. Calls `agent({ workspace, schedule, watch, inbox, onEvent: shim })` from `@agent-relay/agent`, mapping `persona.integrations` to `watch` and `persona.schedules` to `schedule`. + 2. Builds `ctx` once per agent boot: opens Daytona handle (if `sandbox: true`), wires Relayfile-derived clients, attaches memory adapter. + 3. The `shim` reshapes the raw envelope from `@agent-relay/agent` into the `WorkforceEvent` discriminated union and invokes the user's `fn(ctx, event)`. +- The user never imports `@agent-relay/agent` directly. Workforce owns the ergonomics. If the underlying SDK churns, we absorb the diff here. +- The SDK doors stay open for power users: we re-export `agent` from `@agentworkforce/runtime/raw` so anyone who wants the lower-level surface can drop down. This matters for nightcto-shaped projects that outgrow the persona contract. + +The runtime package has **zero runtime dependencies on the CLI**. It can be installed standalone in any Node project to write an agent without `workforce deploy`. That keeps the personas-as-code escape hatch clean. + +--- + +## 5. Deploy CLI — `@agentworkforce/deploy` + +New package. Exports `deploy(persona, opts): Promise`. The existing `packages/cli/src/cli.ts` adds: + +```ts +case 'deploy': await runDeploy(argv); break; +case 'login': await runLogin(argv); break; // small new helper for cloud auth +``` + +`runDeploy` is a ~150-line orchestrator over the deploy package. Public flags: + +``` +workforce deploy + [--mode dev|sandbox|cloud] # default: sandbox if Daytona creds present, else dev + [--workspace ] # workforce workspace; defaults to active + [--no-connect] # skip integration prompts; fail if any are missing + [--detach] # background the runner + [--bundle-out ] # emit bundle without launching + [--dry-run] # validate only +``` + +Flow: + +1. **Resolve persona**: load the JSON via `parsePersonaSpec` (extended schema). Fail fast on schema errors with field-pointed messages. +2. **Login check**: if no workforce auth token in keychain, prompt `workforce login` (browser OAuth via existing relayauth flow). +3. **Workspace check**: ensure user has a workspace; offer to create one (`relay workspaces create ` semantics, called via SDK not subprocess). +4. **Integrations**: for each `persona.integrations` key, check if connected to the active workspace. If not, **prompt the user before each** (`Connect github now? (Y/n)`). On yes, call `RelayfileSetup.connectIntegration({ allowedIntegrations: [key] })` and open the browser. Block until callback. On no, fail with a clear message. +5. **Subscription** (if `useSubscription: true`): call `connectProvider({ provider: })` from `@agent-relay/cloud`. Pick provider from the persona's primary tier harness (claude → anthropic, codex → openai, opencode → user choice). +6. **Schedules**: register each `persona.schedules[i]` with the runtime — for `--dev` and `--sandbox`, schedules are registered via the runtime SDK at boot; for `--cloud` (when M4 lands), they're part of the bundle metadata. +7. **Bundle**: stage to `.workforce/build//`: + - `persona.json` (the spec) + - `agent.ts` (the user's `onEvent` file, possibly transpiled) + - `runner.ts` (generated; calls `handler(...)` and starts the runtime) + - `package.json` (with `@agentworkforce/runtime` and any user-declared deps) +8. **Launch**: + - `--dev`: `node .workforce/build//runner.ts` in the foreground (or detached). + - `--sandbox`: spin up Daytona sandbox, upload the bundle, `daytona.exec("node runner.ts")`. Stream logs back to stdout. + - `--cloud`: print "not yet available; cloud-hosted deploy lands with proactive-runtime M4." Bundle is left in `.workforce/build/` for inspection. +9. **Print status**: agent ID, workspace, integrations connected, schedules registered, runner mode, log tail command. + +`--bundle-out ` writes the bundle and exits. Useful for CI and for the future `--cloud` mode. +`--dry-run` validates schema + lints triggers + checks integration connection status, no side effects. + +--- + +## 6. Harness + workflow bridge + +The user explicitly asked: "within the harness definition can we call for a workflow to be run?" + +Two levels of integration: + +### 6.1 Harness as an LLM-driven tool runner + +When `onEvent` calls `ctx.harness.run({ prompt: "Review this diff", cwd: ctx.sandbox.cwd })`, the runtime spawns the persona's declared tier (claude/codex/opencode) **inside** the sandbox, with the sandbox cwd as the harness's working directory. The harness has: +- Filesystem access to the sandbox's mounted workspace. +- Network access per the persona's `harnessSettings` (`workspaceWriteNetworkAccess`). +- The persona's declared `skills`, `mcpServers`, and `permissions` materialized as the harness expects. +- Optionally, an MCP server we ship — `mcp__workforce` — exposing `workflow.run`, `memory.save`, `memory.recall`, and the per-integration clients. The harness can call these as tools mid-run, without re-architecting around the workforce SDK. + +That's how a "review PR" handler can let Claude Code (or Codex) drive the entire review autonomously: the handler hands the harness the diff, the harness reads files, runs tests in the sandbox, drafts comments, and returns. The handler then posts the comments via `ctx.github.comment`. + +### 6.2 Workflows as first-class invocations + +`ctx.workflow.run("name", args)` is the escape hatch for the heavy machinery in `cloud/workflows`. Inside a handler — or from within the harness via the `mcp__workforce` MCP — you can kick off a multi-step workflow. The workflow runs in cloud (its native habitat) and returns a handle; the handler can `await handle.completion()` or fire-and-forget. + +Examples: +- `review-agent` invokes a `pr-review-multi-tier` workflow that runs three independent reviewers and synthesizes. +- `My-Senior-Dev`-shaped agents invoke `code-explore` + `propose-edits` + `verify` workflows in sequence. + +For v1 the workflow client is a thin HTTP wrapper around the cloud workflows endpoint. Authentication piggybacks on the workspace token already loaded for `agent-gateway`. + +This is the bridge: **personas declare** the integration surface and the handler; **handlers orchestrate**; **workflows execute**. None of these need to know about the others' internals. + +--- + +## 7. Examples to ship today + +### 7.1 `examples/weekly-digest/` + +Direct port of the proactive-agents weekly-digest pattern. + +`persona.json`: +```json +{ + "id": "weekly-digest", + "intent": "research", + "tags": ["analytics"], + "description": "Weekly competitive intel digest — searches the web and Reddit for mentions, dedupes, posts a single GitHub issue.", + "cloud": true, + "integrations": { "github": { "scope": { "repo": "AgentWorkforce/weekly-digest" } } }, + "schedules": [{ "name": "weekly", "cron": "0 9 * * 6", "tz": "UTC" }], + "sandbox": true, + "memory": { "enabled": true, "scopes": ["workspace"], "ttlDays": 90 }, + "onEvent": "./agent.ts", + "tiers": { ... standard codex/opencode tiers ... } +} +``` + +`agent.ts`: ~80 lines. Brave search → cluster → upsert GitHub issue. + +### 7.2 `examples/review-agent/` + +`persona.json`: +```json +{ + "id": "review-agent", + "intent": "review", + "tags": ["review"], + "description": "Reviews opened PRs, responds to @mentions in comments, attempts autofix on red CI.", + "cloud": true, + "useSubscription": true, + "integrations": { + "github": { + "triggers": [ + { "on": "pull_request.opened" }, + { "on": "issue_comment.created", "match": "@mention" }, + { "on": "pull_request_review_comment.created" }, + { "on": "check_run.completed", "where": "conclusion=failure" } + ] + }, + "slack": { "triggers": [{ "on": "app_mention" }] } + }, + "sandbox": true, + "memory": { "enabled": true, "scopes": ["session", "workspace"] }, + "traits": { "voice": "professional-warm", "formality": "low", "preferMarkdown": true }, + "onEvent": "./agent.ts", + "tiers": { ... } +} +``` + +`agent.ts`: ~120 lines. Dispatches on `event.type`: +- `pull_request.opened` → `ctx.harness.run({ prompt: "Review", cwd })` → post review +- `issue_comment.created` + `@mention` → harness with thread context → reply +- `check_run.completed` + failure → harness with logs → propose fix patch +- `slack.app_mention` → conversational reply using memory + +--- + +## 8. Package layout — diff + +``` +workforce/ +├── packages/ +│ ├── cli/ # add `deploy`, `login` cases +│ ├── persona-kit/ # extend PersonaSpec schema (§3) +│ │ └── src/ +│ │ ├── types.ts # +CloudFields, +IntegrationConfig, +Schedule, +Sandbox, +Memory, +Traits +│ │ ├── parse.ts # extend parsePersonaSpec to read new fields +│ │ └── triggers.ts # NEW — known triggers registry (§3.7) +│ ├── harness-kit/ # no changes for v1 +│ ├── workload-router/ # no changes for v1 +│ ├── deploy/ # NEW — @agentworkforce/deploy +│ │ └── src/ +│ │ ├── index.ts # deploy(persona, opts) entry +│ │ ├── login.ts # workspace login helper +│ │ ├── connect.ts # integration + provider connect orchestration +│ │ ├── bundle.ts # stage bundle to .workforce/build/ +│ │ ├── modes/ +│ │ │ ├── dev.ts # local long-lived process +│ │ │ ├── sandbox.ts # Daytona launch +│ │ │ └── cloud.ts # M4-stub +│ │ └── daytona.ts # thin Daytona wrapper (or import from cloud) +│ └── runtime/ # NEW — @agentworkforce/runtime +│ └── src/ +│ ├── index.ts # exports handler(), types +│ ├── ctx.ts # builds WorkforceCtx per invocation +│ ├── clients/ # per-integration auth-wrapped clients +│ │ ├── github.ts +│ │ ├── linear.ts +│ │ ├── slack.ts +│ │ └── ... +│ ├── memory.ts # wraps @agent-assistant/memory +│ ├── workflow.ts # cloud workflows HTTP client +│ ├── shim.ts # @agent-relay/agent envelope → WorkforceEvent +│ └── raw.ts # re-exports for power users +├── examples/ +│ ├── weekly-digest/ # NEW +│ │ ├── persona.json +│ │ └── agent.ts +│ └── review-agent/ # NEW +│ ├── persona.json +│ └── agent.ts +└── docs/ + └── plans/ + └── deploy-v1.md # this file +``` + +Persona schema diffs are non-breaking. Existing personas in `personas/persona-maker.json` etc. continue to work unchanged. + +--- + +## 9. Work split + +You asked for an explicit multi-faceted split. Here's the breakdown. + +### 9.1 I implement directly (this session) + +The work that needs codebase fluency, schema decisions, and inline iteration: + +1. **Schema diff in `persona-kit`** — `types.ts`, `parse.ts`, `triggers.ts`. Includes unit tests for the new field shapes. +2. **`@agentworkforce/runtime` skeleton** — `index.ts`, `ctx.ts`, `shim.ts`, types. Stubs for clients/memory/workflow that compile and return typed placeholders. Actual client implementations slotted in via §9.2 / §9.3. +3. **`@agentworkforce/deploy` skeleton** — `index.ts`, `bundle.ts`, `modes/dev.ts` (the simplest run mode). Login + connect orchestration with `--no-connect` fallback for testing. +4. **CLI wiring** — `deploy` and `login` cases in `cli.ts`, plus `--dry-run` and `--bundle-out` flags. +5. **`examples/weekly-digest/`** — fully working against the `--dev` runner. End-to-end demo path. +6. **`examples/review-agent/`** — persona JSON + agent.ts skeleton; full behavior depends on per-integration clients (see §9.2). +7. **Docs** — extend `README.md` with a `## Deploying agents` section pointing at the examples. + +Estimate: aggressive 4–6 hours given existing surface area. + +### 9.2 Workflow (Ricky) — cross-repo PRs + +Full execution detail lives in `deploy-v1-workflow-spec.md`. This plan owns the *what*; the workflow spec owns the *how*. + +**Ready-now tracks (one PR per track, all in one Ricky workflow run):** +- **Track A** — extract `@workforce/daytona-runner` + add `POST /api/v1/workspaces/:id/sandboxes` in `$CLOUD_REPO`. +- **Track B** — workforce consumes `@workforce/daytona-runner` + workforce-managed sandbox auth path. +- **Track C** — `@agentworkforce/mcp-workforce` MCP server (workflow/memory/integration tools). +- **Track INT** — cross-repo E2E (`weekly-digest --mode dev`, `review-agent --mode sandbox`, optional `linear-shipper`). + +**Blocked tracks (separate workflow files when unblocked):** +- **Track CLOUD** — `--cloud` mode wiring (blocked on cloud proactive-runtime M4). +- **Track BILL** — billing meter for workforce-managed sandboxes (post-v1). +- **Track DOCS** — documentation site updates (after codex Tasks 6/7/9 + human schema-diff merge). + +If a track slips, §10's fallback applies: ship `--dev` end-to-end with `weekly-digest`; `review-agent` becomes next-milestone. + +### 9.3 Codex agent (independent, parallelizable) + +Tasks that are mechanical, well-specified, and don't gate on my decisions — perfect for a codex agent spawned via `workforce agent code-implementer` or a similar persona: + +1. **Trigger registry expansion** — fill out `packages/persona-kit/src/triggers.ts` with the full set of known trigger names per Tier-1 provider (Linear, GitHub, Slack, Notion, Jira) by reading the Relayfile provider docs in `/Users/khaliqgant/Projects/AgentWorkforce/relayfile/docs/`. +2. **Test fixtures** — generate sample `persona.json` files exercising every optional combination (with/without traits, sandbox false, multi-schedule, etc.) into `packages/persona-kit/src/__fixtures__/`. +3. **JSON Schema export** — emit a JSON Schema from the extended `PersonaSpec` for editor autocomplete. New script: `packages/persona-kit/scripts/emit-schema.mjs`. Wire to `pnpm run build` so it ships with the package. +4. **Example expansion** — write a third example, `examples/linear-shipper/` (the paraglide pattern: Linear issue created → drive to PR), purely against the runtime substrate I land in §9.1. +5. **README polish** — once the deploy command is real, codex agent rewrites the workforce README to lead with the deploy story. + +Each item is a self-contained PR for codex to handle in parallel with my main thread. + +--- + +## 10. Today's milestones (chronological) + +| When | Milestone | Owner | Gates | +|---|---|---|---| +| T+0 | Plan reviewed + signed off | user | this doc | +| T+30m | Persona schema diff merged-to-branch | me | typechecks green, fixtures pass | +| T+1h | `@agentworkforce/runtime` skeleton compiles | me | runtime imports + `handler()` types check | +| T+2h | `@agentworkforce/deploy` `--dev` end-to-end | me | `workforce deploy examples/weekly-digest/persona.json --mode dev` runs and logs cron tick | +| T+2.5h | `examples/weekly-digest` posts to a real test GitHub repo | me | demo-ready | +| T+3h | `--sandbox` Daytona mode lights up | me + workflow | depends on Daytona-runner-package PR landing | +| T+4h | `review-agent` example end-to-end against a test PR | me + workflow | depends on per-integration clients | +| T+5h | Codex-agent tasks merged | codex agent | parallel tracks | +| EOD | PR opened, draft for review | me | final pass, screenshots, README | + +If §9.2 (Daytona-runner-package + per-integration clients) slips, we ship `--dev` only and demo weekly-digest end-to-end. `review-agent` becomes "next milestone" — but the persona contract still ships. + +--- + +## 11. Open questions / risks + +### 11.1 Cloud proactive-runtime M1 timing + +Plan assumes `@agent-relay/agent` (M1's SDK) is importable today. If M1 is in flight but not yet published, the runtime substrate falls back to a temporary shim that talks to `agent-gateway` over a hand-rolled WebSocket using the M1 spec's envelope shape. This is a half-day of extra work and an explicit tech-debt note. **Verify before T+0.** + +### 11.2 Daytona auth — BYO and workforce-managed, both v1 + +Two paths, both ship in v1: + +- **BYO**: if `DAYTONA_API_KEY` is set in the user's env, the CLI uses it directly. Zero cloud dependency. Useful for power users / CI. +- **Workforce-managed**: if not set, the CLI calls `POST /api/v1/workspaces/:id/sandboxes` against the cloud API with the user's workspace token. Cloud holds the org-level `DAYTONA_API_KEY` (already exists as an SST secret at `cloud/infra/secrets.ts:23`), calls `daytona.create()`, returns `{ sandboxId, jwtToken, organizationId, expiresAt }`. The CLI then constructs a Daytona client with `{ jwtToken, organizationId }` — auth path the SDK already supports (see `cloud/packages/core/src/auth/credentials.ts`). + +This used to be deferred to a follow-up; it isn't. The endpoint is ~30 lines on the cloud side, it reuses primitives that already exist, and it removes the need to document "you also need a Daytona account" in the v1 quickstart. Moved into Ricky's M1 (Milestone 1) so it ships alongside the Daytona runner extract. + +**Long-lived process lifecycle (verified):** `DaytonaRuntime` does not auto-destroy. Cloud's executor calls `destroy()` explicitly at step end (`cloud/packages/core/src/executor/executor.ts:1029`). Workforce deploy simply never calls `destroy()` — the sandbox persists until the user runs `workforce deployments destroy ` (Milestone 3) or we add an idle-timeout sweeper. + +**Restart story (open):** if the runner process inside the sandbox crashes, the OS process dies but the sandbox itself remains. A small supervisor loop inside `runner.mjs` (`while (true) { await runAgent().catch(log) }`) covers transient crashes. If the entire sandbox dies (rare; Daytona-side incident), the CLI's `--detach` mode is fire-and-forget — we'd need a "workforce deployments tail" command later to detect death and re-spin. Acceptable v1 gap. + +*Endpoint contract, audit logging, and JWT-vs-proxy fallback are pinned in `deploy-v1-workflow-spec.md` Track A.* + +### 11.3 Bundling TS for the sandbox + +The `agent.ts` the user authors needs to run in the Daytona sandbox. Three options for the bundler: +- **esbuild** — fast, minimal config. Output a single-file CJS bundle the sandbox's `node runner.js` can execute. +- **tsx at runtime** — install tsx in the sandbox, run `tsx runner.ts` directly. No bundle step. +- **No transpile** — require user `agent.ts` to be pre-built (`pnpm tsc` in the persona dir). + +Default to **esbuild**. It's a one-import dependency and gives us deterministic output. + +### 11.4 Why no declarative `links:` block + +The user's notes had: +> issue opened in github, synced to linear — deterministic +> issue opened in github, slack message sent — deterministic OR agent reviews and summarizes + +We considered: +```json +"links": [{ "from": "github.issue.opened", "to": "linear.create", "template": "..." }] +``` + +Decision: defer. Every "deterministic" route in the wild has filters, conditions, retry semantics, or templated payloads — all of which pull it back toward code. Three lines in `onEvent` is clearer than a config-language with its own escape hatches: + +```ts +if (event.source === 'github' && event.type === 'issues.opened') { + await ctx.linear.create({ title: event.issue.title, body: event.issue.body }); + await ctx.slack.post('#triage', `Issue ${event.issue.url}`); +} +``` + +If we see N personas repeating the same routing skeleton, we lift it then. Premature abstraction here would lock us into a config shape we'd want to evolve. + +### 11.5 Multi-persona deployments + +Right now, one persona = one deployable. A user with three agents runs `workforce deploy` three times. That's fine for v1. We may want a `workforce.config.json` listing deployables later, but the implementation should treat that as sugar over the single-persona path. + +### 11.6 Local dev story for sandboxed handlers + +When the user iterates on `agent.ts`, the `--dev` mode runs the handler in their local process, fast. The `--sandbox` mode pushes to Daytona each restart, slow. We need a `--sandbox --watch` mode that rsync-mirrors local changes into the live sandbox. Stretch goal for today; trivial follow-up if it slips. + +### 11.7 Authorization & secrets in `--cloud` + +When `--cloud` mode lights up post-M4, the bundle uploaded to cloud must not contain plaintext provider tokens. The Relayfile connection token model already handles this (workspace holds the connection, agents request scoped tokens at runtime). Confirm with the cloud team that M4's accept-bundle endpoint takes a persona JSON + bundled JS only — no secrets baked in. + +--- + +## 12. Out-of-scope rejections (record so we don't drift) + +- **Polyglot handlers (`agent.py`, etc.)**: the user's notes mentioned `etc.py`. Phase 2. The runtime's SDK is TS-first; a Python adapter is a substantial extra package. +- **GUI / dashboard for deployed agents**: cloud-side surface, not workforce CLI. +- **Persona marketplace metadata**: pricing, install counts, ratings. Belongs in a future `marketplace` package. +- **Migrating sage / sales / nightcto onto this contract**: they stay as-is. The new substrate proves itself on greenfield agents first. +- **Cross-persona communication (`agent.send-to(other)`)**: relaycast already does this between agents that opt in. The persona JSON doesn't need a new field; `ctx.slack.post(...)` to a workspace channel works today. + +--- + +## 13. Definition of done (today) + +A user with: +- A fresh workforce install +- A clean `examples/weekly-digest/persona.json` and `agent.ts` +- Their GitHub workspace connected through `relayfile` + +Can run: + +``` +workforce login +workforce deploy ./examples/weekly-digest/persona.json --mode dev +``` + +And within 60 seconds see: +- A "Connect github? (Y/n)" prompt if not connected +- "Workspace = my-workspace, persona = weekly-digest, sandbox = on, mode = dev" +- A long-lived process printing `[runtime] cron schedule "weekly" registered` +- On forcing a `cron.tick`, the handler runs and posts a GitHub issue + +For `--mode sandbox`, the same flow with logs streaming from a Daytona sandbox. + +That's the shippable v1. diff --git a/examples/weekly-digest/README.md b/examples/weekly-digest/README.md new file mode 100644 index 0000000..4c6d998 --- /dev/null +++ b/examples/weekly-digest/README.md @@ -0,0 +1,55 @@ +# Example: `weekly-digest` + +Weekly competitive-intel agent. Runs every Saturday at 09:00 UTC, queries +Brave Search for the configured topics, dedupes + clusters by source host, +and upserts a single GitHub issue per ISO week into `WEEKLY_DIGEST_REPO`. + +## Required env + +```sh +export WEEKLY_DIGEST_TOPICS="agentworkforce,relayfile,proactive-agents" +export WEEKLY_DIGEST_REPO="YourOrg/weekly-digest" +export BRAVE_API_KEY="brave_..." + +# GitHub credentials — either path works: +export WORKFORCE_INTEGRATION_GITHUB_TOKEN="ghp_..." +# or, for a quick demo without Relayfile: +export GITHUB_TOKEN="ghp_..." + +# Workspace (only needed when actually launching, not for --dry-run): +export WORKFORCE_WORKSPACE_ID="ws_demo" +export WORKFORCE_WORKSPACE_TOKEN="ws_token_..." +``` + +## Deploy + +```sh +# Validate the persona without side effects. +workforce deploy ./examples/weekly-digest/persona.json --dry-run + +# Stage the bundle to a directory and inspect it (no launch). +workforce deploy ./examples/weekly-digest/persona.json \ + --bundle-out /tmp/wf-weekly-digest + +# Run locally as a long-lived process; pipe an envelope on stdin to fire +# the handler immediately. The runner exits when stdin closes. +workforce deploy ./examples/weekly-digest/persona.json --mode dev +``` + +## Firing the handler manually + +The runner reads NDJSON envelopes from stdin. To trigger the handler from +the command line, drive the bundle directly: + +```sh +echo '{"id":"manual-1","workspace":"ws_demo","type":"cron.tick","occurredAt":"2026-05-12T09:00:00Z","name":"weekly","cron":"0 9 * * 6"}' \ + | node /tmp/wf-weekly-digest/runner.mjs +``` + +The handler will: + +1. Resolve topics + repo + tokens from env. +2. Query Brave Search per topic. +3. Dedupe by URL and cluster results by source host. +4. Upsert a single `Weekly digest — YYYY-WNN` issue in the target repo. +5. Save a memory note tagged `weekly-digest` + `week:`. diff --git a/examples/weekly-digest/agent.ts b/examples/weekly-digest/agent.ts new file mode 100644 index 0000000..0474221 --- /dev/null +++ b/examples/weekly-digest/agent.ts @@ -0,0 +1,228 @@ +import { + createGithubClient, + handler, + WorkforceIntegrationError, + type GithubClient, + type WorkforceCtx, + type WorkforceEvent +} from '@agentworkforce/runtime'; + +interface DigestItem { + title: string; + url: string; + description: string; + host: string; +} + +interface DigestCluster { + host: string; + items: DigestItem[]; +} + +export default handler(async (ctx, event) => { + if (event.source !== 'cron' || event.name !== 'weekly') { + ctx.log('warn', 'weekly-digest.ignored', { source: event.source }); + return; + } + + const config = readConfig(); + const github = resolveGithubClient(ctx); + + const topics = parseTopics(config.topics); + const fetchedAt = new Date(event.occurredAt); + const isoWeek = isoWeekString(fetchedAt); + const title = `Weekly digest — ${isoWeek}`; + + ctx.log('info', 'weekly-digest.search.start', { topics, week: isoWeek }); + + const items: DigestItem[] = []; + for (const topic of topics) { + try { + const found = await searchBrave(topic, config.braveApiKey); + items.push(...found); + } catch (err) { + ctx.log('error', 'weekly-digest.search.failed', { + topic, + error: err instanceof Error ? err.message : String(err) + }); + } + } + + const deduped = dedupeByUrl(items); + const clusters = clusterByHost(deduped); + + if (clusters.length === 0) { + ctx.log('info', 'weekly-digest.no-results', { week: isoWeek }); + return; + } + + const body = renderDigest({ week: isoWeek, fetchedAt, topics, clusters }); + const [owner, repo] = config.repo.split('/'); + if (!owner || !repo) { + throw new Error(`weekly-digest: WEEKLY_DIGEST_REPO must be "owner/repo"; got "${config.repo}"`); + } + + const result = await github.upsertIssue({ + owner, + repo, + title, + body, + matchTitle: title, + labels: ['weekly-digest'] + }); + + ctx.log('info', 'weekly-digest.issue.upserted', { + week: isoWeek, + number: result.number, + url: result.url, + created: result.created, + clusterCount: clusters.length, + itemCount: deduped.length + }); + + await ctx.memory.save(`Weekly digest ${isoWeek} published: ${result.url}`, { + tags: ['weekly-digest', `week:${isoWeek}`], + scope: 'workspace' + }); +}); + +function readConfig(): { topics: string; repo: string; braveApiKey: string; githubToken: string } { + const topics = process.env.WEEKLY_DIGEST_TOPICS; + const repo = process.env.WEEKLY_DIGEST_REPO; + const braveApiKey = process.env.BRAVE_API_KEY; + const githubToken = + process.env.WORKFORCE_INTEGRATION_GITHUB_TOKEN ?? process.env.GITHUB_TOKEN ?? ''; + if (!topics || !topics.trim()) { + throw new Error('WEEKLY_DIGEST_TOPICS is required (comma-separated list)'); + } + if (!repo || !repo.trim()) { + throw new Error('WEEKLY_DIGEST_REPO is required (format: "owner/repo")'); + } + if (!braveApiKey) { + throw new Error('BRAVE_API_KEY is required to query Brave Search'); + } + if (!githubToken) { + throw new Error( + 'WORKFORCE_INTEGRATION_GITHUB_TOKEN (or GITHUB_TOKEN) is required to upsert the digest issue' + ); + } + return { topics, repo, braveApiKey, githubToken }; +} + +function resolveGithubClient(ctx: WorkforceCtx): GithubClient { + if (ctx.github) return ctx.github; + const token = + process.env.WORKFORCE_INTEGRATION_GITHUB_TOKEN ?? process.env.GITHUB_TOKEN ?? ''; + if (!token) { + throw new Error( + 'no GitHub client on ctx and no GITHUB_TOKEN in env — set WORKFORCE_INTEGRATION_GITHUB_TOKEN before deploy' + ); + } + return createGithubClient({ token }); +} + +function parseTopics(raw: string): string[] { + return raw + .split(',') + .map((t) => t.trim()) + .filter((t) => t.length > 0); +} + +async function searchBrave(query: string, apiKey: string): Promise { + const url = `https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=10&freshness=pw`; + const response = await fetch(url, { + headers: { + accept: 'application/json', + 'X-Subscription-Token': apiKey, + 'user-agent': 'workforce-weekly-digest' + } + }); + if (!response.ok) { + throw new WorkforceIntegrationError({ + provider: 'brave', + operation: 'search', + message: `${response.status} ${response.statusText}`, + status: response.status, + retryable: response.status >= 500 || response.status === 429 + }); + } + const payload = (await response.json()) as { + web?: { results?: Array<{ title: string; url: string; description: string }> }; + }; + const results = payload.web?.results ?? []; + return results.map((r) => ({ + title: r.title, + url: r.url, + description: r.description, + host: safeHost(r.url) + })); +} + +function dedupeByUrl(items: DigestItem[]): DigestItem[] { + const seen = new Set(); + const out: DigestItem[] = []; + for (const item of items) { + if (seen.has(item.url)) continue; + seen.add(item.url); + out.push(item); + } + return out; +} + +function clusterByHost(items: DigestItem[]): DigestCluster[] { + const buckets = new Map(); + for (const item of items) { + const existing = buckets.get(item.host); + if (existing) existing.push(item); + else buckets.set(item.host, [item]); + } + return Array.from(buckets.entries()) + .map(([host, bucketItems]) => ({ host, items: bucketItems })) + .sort((a, b) => b.items.length - a.items.length); +} + +function renderDigest(args: { + week: string; + fetchedAt: Date; + topics: string[]; + clusters: DigestCluster[]; +}): string { + const lines: string[] = []; + lines.push(`# Weekly digest — ${args.week}`); + lines.push(''); + lines.push(`Fetched at ${args.fetchedAt.toISOString()}.`); + lines.push(`Topics: ${args.topics.join(', ')}`); + lines.push(''); + for (const cluster of args.clusters) { + lines.push(`## ${cluster.host} (${cluster.items.length})`); + for (const item of cluster.items) { + lines.push(`- [${item.title}](${item.url}) — ${truncate(item.description, 200)}`); + } + lines.push(''); + } + return lines.join('\n'); +} + +function safeHost(rawUrl: string): string { + try { + return new URL(rawUrl).hostname; + } catch { + return 'unknown'; + } +} + +function isoWeekString(date: Date): string { + const d = new Date(Date.UTC(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate())); + const dayOfWeek = d.getUTCDay() || 7; + d.setUTCDate(d.getUTCDate() + 4 - dayOfWeek); + const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1)); + const weekNum = Math.ceil(((d.getTime() - yearStart.getTime()) / 86400000 + 1) / 7); + return `${d.getUTCFullYear()}-W${String(weekNum).padStart(2, '0')}`; +} + +function truncate(s: string, n: number): string { + return s.length <= n ? s : `${s.slice(0, n - 1)}…`; +} + +// Touch the imported types so build does not warn on type-only imports. +type _Touch = WorkforceEvent; diff --git a/examples/weekly-digest/persona.json b/examples/weekly-digest/persona.json new file mode 100644 index 0000000..8ec35c5 --- /dev/null +++ b/examples/weekly-digest/persona.json @@ -0,0 +1,50 @@ +{ + "id": "weekly-digest", + "intent": "documentation", + "tags": ["documentation"], + "description": "Weekly competitive-intel digest. Searches the web for mentions of configured topics, dedupes and clusters by source domain, and upserts a single GitHub issue per ISO week.", + "cloud": true, + "integrations": { + "github": { + "scope": { "repo": "AgentWorkforce/weekly-digest" } + } + }, + "schedules": [ + { "name": "weekly", "cron": "0 9 * * 6", "tz": "UTC" } + ], + "sandbox": true, + "memory": { "enabled": true, "scopes": ["workspace"], "ttlDays": 90 }, + "onEvent": "./agent.ts", + "inputs": { + "WEEKLY_DIGEST_TOPICS": { + "description": "Comma-separated list of topics the agent searches for each week.", + "env": "WEEKLY_DIGEST_TOPICS", + "default": "agentworkforce,relayfile,proactive-agents" + }, + "WEEKLY_DIGEST_REPO": { + "description": "GitHub repo to upsert the digest issue into; format \"owner/repo\".", + "env": "WEEKLY_DIGEST_REPO", + "default": "AgentWorkforce/weekly-digest" + } + }, + "tiers": { + "best": { + "harness": "codex", + "model": "openai-codex/gpt-5.3-codex", + "systemPrompt": "Research the configured topics and produce a clustered weekly digest.", + "harnessSettings": { "reasoning": "high", "timeoutSeconds": 1200 } + }, + "best-value": { + "harness": "opencode", + "model": "opencode/gpt-5-nano", + "systemPrompt": "Research the configured topics and produce a clustered weekly digest.", + "harnessSettings": { "reasoning": "medium", "timeoutSeconds": 900 } + }, + "minimum": { + "harness": "opencode", + "model": "opencode/minimax-m2.5-free", + "systemPrompt": "Research the configured topics and produce a clustered weekly digest.", + "harnessSettings": { "reasoning": "low", "timeoutSeconds": 600 } + } + } +} diff --git a/packages/cli/package.json b/packages/cli/package.json index 540fe0d..1f5ef76 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -10,6 +10,7 @@ "package.json" ], "dependencies": { + "@agentworkforce/deploy": "workspace:*", "@agentworkforce/persona-kit": "workspace:*", "@agentworkforce/workload-router": "workspace:*", "@relayburn/sdk": "^2.5.2", diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index 4827bb0..ef9cd68 100644 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -60,6 +60,7 @@ import { type AutoSyncHandle } from '@relayfile/local-mount'; import ora, { type Ora } from 'ora'; +import { runDeploy, runLogin } from './deploy-command.js'; import { startLaunchMetadataRecording, type LaunchMetadataRun @@ -3816,6 +3817,14 @@ export async function main(): Promise { await runPick(rest); } + if (subcommand === 'deploy') { + await runDeploy(rest); + } + + if (subcommand === 'login') { + await runLogin(rest); + } + if (subcommand !== 'agent') { die(`Unknown subcommand "${subcommand}".`); } diff --git a/packages/cli/src/deploy-command.ts b/packages/cli/src/deploy-command.ts new file mode 100644 index 0000000..ba2dcee --- /dev/null +++ b/packages/cli/src/deploy-command.ts @@ -0,0 +1,170 @@ +import path from 'node:path'; +import { + deploy, + type DeployMode, + type DeployOptions, + type ModeLaunchHandle +} from '@agentworkforce/deploy'; + +/** + * Argv parser + dispatcher for `workforce deploy [flags]`. + * Keeps cli.ts itself slim — the file is already a large dispatcher and + * each command lands in its own module when it grows past trivial. + */ +export async function runDeploy(args: readonly string[]): Promise { + if (args.length === 0 || args[0] === '-h' || args[0] === '--help') { + process.stdout.write(DEPLOY_USAGE); + process.exit(args.length === 0 ? 1 : 0); + } + + const parsed = parseDeployArgs(args); + + try { + const result = await deploy(parsed); + if (parsed.dryRun) { + process.stdout.write(`\nok: ${result.deploymentId} (dry-run)\n`); + process.exit(0); + } + if (parsed.bundleOut) { + process.stdout.write(`\nbundle: ${result.bundleDir}\n`); + process.exit(0); + } + process.stdout.write( + `\nok: ${result.deploymentId} (mode=${result.mode}, workspace=${result.workspace})\n` + ); + + // `--detach` returns immediately; otherwise the CLI blocks on the + // runner's `done` promise so logs keep streaming in the foreground + // and Ctrl-C tears the runner down cleanly. + if (parsed.detach || !isRunHandle(result.runHandle)) { + process.exit(0); + } + const exit = await result.runHandle.done; + process.exit(exit.code); + } catch (err) { + process.stderr.write( + `\nworkforce deploy failed: ${err instanceof Error ? err.message : String(err)}\n` + ); + process.exit(1); + } +} + +function isRunHandle(value: unknown): value is ModeLaunchHandle { + if (typeof value !== 'object' || value === null || !('done' in value)) { + return false; + } + const done = (value as { done?: unknown }).done; + return typeof done === 'object' && done !== null && typeof (done as { then?: unknown }).then === 'function'; +} + +export async function runLogin(args: readonly string[]): Promise { + if (args.length > 0 && (args[0] === '-h' || args[0] === '--help')) { + process.stdout.write(LOGIN_USAGE); + process.exit(0); + } + process.stderr.write( + 'The browser-based workforce login flow is rolling out in stages and is not on by default yet.\n' + + 'For now, export your workspace credentials in the shell:\n\n' + + ' export WORKFORCE_WORKSPACE_ID=\n' + + ' export WORKFORCE_WORKSPACE_TOKEN=\n\n' + + 'Then re-run `workforce deploy ./your-persona.json`.\n' + ); + process.exit(1); +} + +const DEPLOY_USAGE = `usage: workforce deploy [flags] + +Flags: + --mode dev|sandbox|cloud Pick a run mode (default: sandbox if Daytona/workspace creds resolve, else dev) + --workspace Workforce workspace; defaults to the active workspace + --no-connect Skip integration-connect prompts; fail if any are missing + --byo-sandbox Force BYO Daytona auth even when logged in + --detach Background the runner instead of streaming logs + --bundle-out Emit the bundle to and exit (no launch) + --dry-run Validate the persona and exit before any side effects + --cloud-url Override the workforce cloud base URL + -h, --help Print this message +`; + +const LOGIN_USAGE = `usage: workforce login + +Connect this machine to a workforce workspace. The full OAuth flow ships +once the cloud login surface is live; until then, set: + + export WORKFORCE_WORKSPACE_ID=... + export WORKFORCE_WORKSPACE_TOKEN=... +`; + +export function parseDeployArgs(args: readonly string[]): DeployOptions { + let personaPath: string | undefined; + let mode: DeployMode | undefined; + let workspace: string | undefined; + let noConnect = false; + let byoSandbox = false; + let detach = false; + let bundleOut: string | undefined; + let dryRun = false; + let cloudUrl: string | undefined; + + for (let i = 0; i < args.length; i += 1) { + const a = args[i]; + if (a === '-h' || a === '--help') { + process.stdout.write(DEPLOY_USAGE); + process.exit(0); + } else if (a === '--mode') { + const v = args[++i]; + if (v !== 'dev' && v !== 'sandbox' && v !== 'cloud') { + die(`--mode: expected one of dev|sandbox|cloud; got "${v ?? ''}"`); + } + mode = v; + } else if (a === '--workspace') { + workspace = expectValue('--workspace', args[++i]); + } else if (a === '--no-connect') { + noConnect = true; + } else if (a === '--byo-sandbox') { + byoSandbox = true; + } else if (a === '--detach') { + detach = true; + } else if (a === '--bundle-out') { + bundleOut = expectValue('--bundle-out', args[++i]); + } else if (a === '--dry-run') { + dryRun = true; + } else if (a === '--cloud-url') { + cloudUrl = expectValue('--cloud-url', args[++i]); + } else if (a.startsWith('--')) { + die(`deploy: unknown flag "${a}"`); + } else if (!personaPath) { + personaPath = path.resolve(a); + } else { + die(`deploy: unexpected positional argument "${a}"`); + } + } + + if (!personaPath) { + die('deploy: missing persona path. Usage: workforce deploy '); + } + + return { + personaPath, + ...(mode ? { mode } : {}), + ...(workspace ? { workspace } : {}), + ...(noConnect ? { noConnect: true } : {}), + ...(byoSandbox ? { byoSandbox: true } : {}), + ...(detach ? { detach: true } : {}), + ...(bundleOut ? { bundleOut } : {}), + ...(dryRun ? { dryRun: true } : {}), + ...(cloudUrl ? { cloudUrl } : {}) + }; +} + +function expectValue(flag: string, value: string | undefined): string { + if (typeof value !== 'string' || !value.trim()) { + die(`${flag}: missing value`); + } + return value; +} + +function die(message: string): never { + process.stderr.write(`${message}\n`); + process.exit(1); +} diff --git a/packages/deploy/package.json b/packages/deploy/package.json new file mode 100644 index 0000000..6fa22e4 --- /dev/null +++ b/packages/deploy/package.json @@ -0,0 +1,40 @@ +{ + "name": "@agentworkforce/deploy", + "version": "0.0.0", + "private": false, + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "files": [ + "dist", + "README.md", + "package.json" + ], + "repository": { + "type": "git", + "url": "https://github.com/AgentWorkforce/workforce", + "directory": "packages/deploy" + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsc -p tsconfig.json", + "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput", + "typecheck": "tsc -p tsconfig.json --noEmit", + "test": "tsc -p tsconfig.json && node --test dist/**/*.test.js dist/*.test.js", + "lint": "tsc -p tsconfig.json --noEmit" + }, + "dependencies": { + "@agentworkforce/persona-kit": "workspace:*", + "@agentworkforce/runtime": "workspace:*", + "@daytonaio/sdk": "^0.148.0", + "esbuild": "^0.25.0" + } +} diff --git a/packages/deploy/src/bundle.test.ts b/packages/deploy/src/bundle.test.ts new file mode 100644 index 0000000..253d00a --- /dev/null +++ b/packages/deploy/src/bundle.test.ts @@ -0,0 +1,112 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import os from 'node:os'; +import { bundleStager } from './bundle.js'; +import type { PersonaSpec } from '@agentworkforce/persona-kit'; + +const baseRuntime = { + harness: 'claude' as const, + model: 'anthropic/claude-3-5-sonnet', + systemPrompt: 'be helpful', + harnessSettings: { reasoning: 'medium' as const, timeoutSeconds: 300 } +}; + +function persona(overrides: Partial = {}): PersonaSpec { + return { + id: 'bundle-fixture', + intent: 'documentation', + tags: ['documentation'], + description: 'fixture for bundle tests', + skills: [], + tiers: { best: baseRuntime, 'best-value': baseRuntime, minimum: baseRuntime }, + cloud: true, + schedules: [{ name: 'weekly', cron: '0 9 * * 6' }], + onEvent: './agent.ts', + ...overrides + }; +} + +test('bundleStager produces an executable, importable bundle from a real onEvent file', async () => { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-bundle-')); + try { + const personaPath = path.join(dir, 'persona.json'); + const personaSpec = persona(); + await writeFile(personaPath, JSON.stringify(personaSpec, null, 2), 'utf8'); + await writeFile( + path.join(dir, 'agent.ts'), + [ + "import { handler } from '@agentworkforce/runtime';", + '', + 'export default handler(async (ctx, event) => {', + " ctx.log('info', 'fixture.handler.fired', { eventId: event.id });", + '});', + '' + ].join('\n'), + 'utf8' + ); + + const outDir = path.join(dir, 'build'); + const result = await bundleStager.stage({ + personaPath, + persona: personaSpec, + outDir + }); + + assert.equal(result.personaCopyPath, path.join(outDir, 'persona.json')); + assert.equal(result.runnerPath, path.join(outDir, 'runner.mjs')); + assert.equal(result.bundlePath, path.join(outDir, 'agent.bundle.mjs')); + assert.equal(result.packageJsonPath, path.join(outDir, 'package.json')); + assert.ok(result.sizeBytes > 0); + + // persona.json round-trips verbatim + const personaCopy = JSON.parse(await readFile(result.personaCopyPath, 'utf8')); + assert.equal(personaCopy.id, personaSpec.id); + assert.equal(personaCopy.onEvent, './agent.ts'); + + // runner imports the expected entry points + const runnerSource = await readFile(result.runnerPath, 'utf8'); + assert.match(runnerSource, /from '@agentworkforce\/runtime\/runner'/); + assert.match(runnerSource, /from '@agentworkforce\/runtime'/); + assert.match(runnerSource, /import \* as userModule from '\.\/agent\.bundle\.mjs'/); + + // bundle output is ES module shape and references the runtime as external + const bundleSource = await readFile(result.bundlePath, 'utf8'); + assert.match(bundleSource, /^import /m); + assert.match(bundleSource, /from\s+['"]@agentworkforce\/runtime['"]/); + } finally { + await rm(dir, { recursive: true, force: true }); + } +}); + +test('bundleStager throws when onEvent file is missing', async () => { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-bundle-')); + try { + const personaPath = path.join(dir, 'persona.json'); + const personaSpec = persona({ onEvent: './missing.ts' }); + await writeFile(personaPath, JSON.stringify(personaSpec, null, 2), 'utf8'); + await assert.rejects( + () => bundleStager.stage({ personaPath, persona: personaSpec, outDir: path.join(dir, 'build') }), + /file not found/ + ); + } finally { + await rm(dir, { recursive: true, force: true }); + } +}); + +test('bundleStager throws when persona has no onEvent', async () => { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-bundle-')); + try { + const personaPath = path.join(dir, 'persona.json'); + const personaSpec = persona(); + delete (personaSpec as { onEvent?: string }).onEvent; + await writeFile(personaPath, JSON.stringify(personaSpec, null, 2), 'utf8'); + await assert.rejects( + () => bundleStager.stage({ personaPath, persona: personaSpec, outDir: path.join(dir, 'build') }), + /missing onEvent/ + ); + } finally { + await rm(dir, { recursive: true, force: true }); + } +}); diff --git a/packages/deploy/src/bundle.ts b/packages/deploy/src/bundle.ts new file mode 100644 index 0000000..7d05d28 --- /dev/null +++ b/packages/deploy/src/bundle.ts @@ -0,0 +1,153 @@ +import { mkdir, writeFile, stat } from 'node:fs/promises'; +import path from 'node:path'; +import { build } from 'esbuild'; +import type { BundleStageInput, BundleResult, BundleStager } from './types.js'; + +/** + * Versioned identifier embedded in the generated runner so a future + * bundle reader can detect format drift. Bumped whenever the runner + * shape changes incompatibly. + */ +const RUNNER_FORMAT_VERSION = 1; + +/** + * Stage a deploy-ready bundle to `input.outDir`. Output layout: + * + * / + * agent.bundle.mjs — esbuilt user `onEvent` (default-exported handler) + * runner.mjs — entry that imports the runtime + bundle + persona + * persona.json — verbatim copy of the input persona spec + * package.json — minimal manifest pinning the runtime dep + * + * The bundle is idempotent: re-running with the same `outDir` overwrites + * the four files cleanly. Auxiliary files left behind from earlier runs + * are not touched (callers control the directory lifecycle). + * + * Externals: every `node:*` builtin and `@agentworkforce/runtime` itself + * are left external so the runner can resolve them at execution time. + * Bundling the runtime in would require shipping the runtime sources into + * every sandbox; the chosen split keeps the bundle small and lets ops + * patch the runtime without rebuilding every persona. + */ +export const bundleStager: BundleStager = { + async stage(input: BundleStageInput): Promise { + await mkdir(input.outDir, { recursive: true }); + + const onEventAbs = path.resolve(path.dirname(input.personaPath), input.persona.onEvent ?? ''); + if (!input.persona.onEvent) { + throw new Error( + `bundle: persona "${input.persona.id}" is missing onEvent (cannot stage a bundle without a handler)` + ); + } + await assertReadableFile(onEventAbs, `persona "${input.persona.id}" onEvent`); + + const bundlePath = path.join(input.outDir, 'agent.bundle.mjs'); + const runnerPath = path.join(input.outDir, 'runner.mjs'); + const personaCopyPath = path.join(input.outDir, 'persona.json'); + const packageJsonPath = path.join(input.outDir, 'package.json'); + + await build({ + entryPoints: [onEventAbs], + outfile: bundlePath, + bundle: true, + format: 'esm', + platform: 'node', + target: 'node20', + sourcemap: 'inline', + logLevel: 'silent', + minify: input.bundlerOptions?.minify ?? false, + // Resolve TypeScript / JS extensions without forcing the user to + // write `.ts`-suffixed imports in their handler file. + resolveExtensions: ['.ts', '.mts', '.cts', '.tsx', '.js', '.mjs', '.cjs', '.jsx', '.json'], + external: [ + // Runtime stays external — see file header comment. + '@agentworkforce/runtime', + '@agentworkforce/runtime/raw', + // Node builtins must never be bundled. + 'node:*' + ] + }); + + await writeFile(personaCopyPath, JSON.stringify(input.persona, null, 2) + '\n', 'utf8'); + + await writeFile(packageJsonPath, buildPackageJson(input.persona.id), 'utf8'); + + await writeFile(runnerPath, renderRunner(), 'utf8'); + + const bundleStat = await stat(bundlePath); + const runnerStat = await stat(runnerPath); + const sizeBytes = bundleStat.size + runnerStat.size; + + return { + personaCopyPath, + runnerPath, + bundlePath, + packageJsonPath, + sizeBytes + }; + } +}; + +function buildPackageJson(personaId: string): string { + return ( + JSON.stringify( + { + name: `@agentworkforce/deployed-${personaId}`, + private: true, + version: '0.0.0', + type: 'module', + main: './runner.mjs', + dependencies: { + '@agentworkforce/runtime': '*' + }, + comment: + 'Generated by workforce deploy. The runtime dep is pinned to "*" because deploys resolve the runtime version from the active workspace.' + }, + null, + 2 + ) + '\n' + ); +} + +function renderRunner(): string { + return `// Generated by @agentworkforce/deploy. Format version ${RUNNER_FORMAT_VERSION}. +// Do not edit by hand — \`workforce deploy\` overwrites this file on every stage. +// +// The runner imports the user's handler from the esbuilt bundle, the +// parsed persona spec from the verbatim JSON copy, and the runtime's +// \`startRunner\` to drive the dispatch loop. Envelopes arrive on stdin +// as NDJSON; structured logs go to stdout. + +import { createRequire } from 'node:module'; +import { startRunner } from '@agentworkforce/runtime/runner'; +import { handler as wrapHandler } from '@agentworkforce/runtime'; +import * as userModule from './agent.bundle.mjs'; + +const require = createRequire(import.meta.url); +const persona = require('./persona.json'); + +const candidate = userModule.default ?? userModule.handler; +if (typeof candidate !== 'function') { + throw new TypeError( + \`workforce deploy bundle: \${persona.id} did not default-export a function. Did you forget \\\`export default handler(...)\\\`?\` + ); +} +const handler = candidate.__workforceHandler ? candidate : wrapHandler(candidate); + +await startRunner({ persona, handler }); +`; +} + +async function assertReadableFile(abs: string, label: string): Promise { + try { + const st = await stat(abs); + if (!st.isFile()) { + throw new Error(`${label}: ${abs} is not a regular file`); + } + } catch (err) { + if ((err as NodeJS.ErrnoException).code === 'ENOENT') { + throw new Error(`${label}: file not found at ${abs}`); + } + throw err; + } +} diff --git a/packages/deploy/src/connect.ts b/packages/deploy/src/connect.ts new file mode 100644 index 0000000..9998b66 --- /dev/null +++ b/packages/deploy/src/connect.ts @@ -0,0 +1,196 @@ +import type { PersonaSpec } from '@agentworkforce/persona-kit'; +import type { DeployIO, IntegrationConnectOutcome } from './types.js'; + +/** + * Provider env-var conventions the deploy CLI checks when no higher-level + * integration resolver is supplied. The convention is: + * + * - `WORKFORCE_INTEGRATION__TOKEN` — direct provider token + * (e.g. `WORKFORCE_INTEGRATION_GITHUB_TOKEN`). Treated as "connected" + * when present and non-empty. + * - or `WORKFORCE_INTEGRATION__CONNECTION_ID` — Relayfile + * connection id, resolved to a scoped token at runtime by the agent. + * + * The connect side is a no-op for the env path: there is nothing to + * authenticate interactively. Authors plug a higher-level resolver into + * `DeployResolvers.integrations` once Relayfile's OAuth surface is wired. + */ +const PROVIDER_ENV_PREFIX = 'WORKFORCE_INTEGRATION_'; + +/** + * Resolver the orchestrator uses to check + connect a Relayfile-backed + * provider for the active workspace. The deploy package does not depend + * on `@relayfile/sdk` directly; the CLI dispatches the real implementation + * (which imports the SDK) into this contract. + * + * Decoupling this keeps the orchestrator unit-testable without spinning + * up Relayfile and keeps the SDK out of the deploy package's transitive + * dep tree (smaller bin, faster install). + */ +export interface IntegrationConnectResolver { + /** Is the provider already linked to the workspace? */ + isConnected(args: { workspace: string; provider: string }): Promise; + /** Run the browser-based OAuth flow and resolve when the user finishes. */ + connect(args: { workspace: string; provider: string }): Promise<{ connectionId: string }>; +} + +/** + * Provider linker for `useSubscription: true` personas — connects the + * user's chosen LLM provider so cloud inference is billed against their + * subscription rather than workforce. + */ +export interface ProviderSubscriptionResolver { + isConnected(args: { workspace: string; providerHint?: string }): Promise; + connect(args: { workspace: string; providerHint?: string }): Promise<{ provider: string }>; +} + +/** + * Resolver backed by env vars. Used as the default when no higher-level + * implementation is plugged in. `isConnected` returns true exactly when + * one of the two recognized env vars is set for the provider; `connect` + * is a no-op that records the env-resolved nature of the connection so + * the orchestrator's flow stays uniform across resolvers. + */ +export function envIntegrationResolver(): IntegrationConnectResolver { + return { + async isConnected({ provider }) { + return providerHasEnvCredentials(provider); + }, + async connect({ provider }) { + if (!providerHasEnvCredentials(provider)) { + throw new Error( + `env resolver: ${provider} is not connected. Set ${PROVIDER_ENV_PREFIX}${provider.toUpperCase()}_TOKEN or ${PROVIDER_ENV_PREFIX}${provider.toUpperCase()}_CONNECTION_ID, then re-run deploy. (Higher-level resolvers — e.g. a Relayfile OAuth flow — plug in via DeployResolvers.integrations.)` + ); + } + return { connectionId: `env:${provider}` }; + } + }; +} + +function providerHasEnvCredentials(provider: string): boolean { + const upper = provider.toUpperCase(); + return Boolean( + process.env[`${PROVIDER_ENV_PREFIX}${upper}_TOKEN`] || + process.env[`${PROVIDER_ENV_PREFIX}${upper}_CONNECTION_ID`] + ); +} + +export interface ConnectAllInput { + persona: PersonaSpec; + workspace: string; + noConnect: boolean; + io: DeployIO; + integrations: IntegrationConnectResolver; + /** Required only when persona.useSubscription is true. */ + subscription?: ProviderSubscriptionResolver; +} + +export interface ConnectAllResult { + outcomes: IntegrationConnectOutcome[]; + /** Provider the subscription was bound to, when applicable. */ + subscriptionProvider?: string; +} + +/** + * Walk the persona's declared integrations and ensure each is connected. + * Per the deploy-v1 spec, the orchestrator prompts before each provider's + * connect flow ("Connect github now? (Y/n)") so users running on a shared + * machine don't have surprise browser pops. + * + * Behavior summary: + * - integrations: {} or undefined → returns immediately, no prompts + * - already-connected provider → no prompt; emits `already-connected` + * - not connected + noConnect=true → fails the deploy with a clear message + * - not connected + noConnect=false → prompts; on yes runs `connect`, + * on no marks `skipped`. The orchestrator decides what to do with + * `skipped` outcomes (today: fails the deploy at the call site). + */ +export async function connectIntegrations(input: ConnectAllInput): Promise { + const integrations = input.persona.integrations ?? {}; + const outcomes: IntegrationConnectOutcome[] = []; + + for (const provider of Object.keys(integrations)) { + const connected = await input.integrations + .isConnected({ workspace: input.workspace, provider }) + .catch((err) => { + input.io.warn( + `failed to check connection status for ${provider}: ${err instanceof Error ? err.message : String(err)}` + ); + return false; + }); + + if (connected) { + input.io.info(`integrations.${provider}: already connected`); + outcomes.push({ provider, status: 'already-connected' }); + continue; + } + + if (input.noConnect) { + input.io.error( + `integrations.${provider}: not connected, and --no-connect was passed` + ); + outcomes.push({ + provider, + status: 'failed', + message: 'not connected (--no-connect was set)' + }); + continue; + } + + const shouldConnect = await input.io.confirm( + `Connect ${provider} now? (opens browser)`, + { defaultValue: true } + ); + if (!shouldConnect) { + outcomes.push({ provider, status: 'skipped', message: 'user declined to connect' }); + continue; + } + + try { + const result = await input.integrations.connect({ workspace: input.workspace, provider }); + input.io.info(`integrations.${provider}: connected (${result.connectionId})`); + outcomes.push({ provider, status: 'connected-now' }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + input.io.error(`integrations.${provider}: connect failed: ${message}`); + outcomes.push({ provider, status: 'failed', message }); + } + } + + let subscriptionProvider: string | undefined; + if (input.persona.useSubscription) { + if (!input.subscription) { + throw new Error( + 'persona has useSubscription:true but no subscription resolver was supplied to the deploy orchestrator' + ); + } + const isConn = await input.subscription + .isConnected({ workspace: input.workspace }) + .catch(() => false); + if (!isConn) { + if (input.noConnect) { + throw new Error( + 'persona requires a subscription provider connection, but --no-connect was passed' + ); + } + const ok = await input.io.confirm( + 'persona has useSubscription:true — connect your LLM provider now?', + { defaultValue: true } + ); + if (!ok) { + throw new Error('user declined the subscription provider connect; deploy aborted'); + } + const result = await input.subscription.connect({ workspace: input.workspace }); + subscriptionProvider = result.provider; + input.io.info(`subscription: connected (${result.provider})`); + } else { + subscriptionProvider = '(already-connected)'; + input.io.info('subscription: already connected'); + } + } + + return { + outcomes, + ...(subscriptionProvider ? { subscriptionProvider } : {}) + }; +} diff --git a/packages/deploy/src/deploy.test.ts b/packages/deploy/src/deploy.test.ts new file mode 100644 index 0000000..da95bbb --- /dev/null +++ b/packages/deploy/src/deploy.test.ts @@ -0,0 +1,344 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, writeFile, mkdir } from 'node:fs/promises'; +import path from 'node:path'; +import os from 'node:os'; +import { deploy } from './deploy.js'; +import { createBufferedIO } from './io.js'; +import { preflightPersona } from './preflight.js'; +import type { + BundleStager, + IntegrationConnectResolver, + ModeLauncher, + WorkspaceAuth +} from './index.js'; + +const baseRuntime = { + harness: 'claude', + model: 'anthropic/claude-3-5-sonnet', + systemPrompt: 'be helpful', + harnessSettings: { reasoning: 'medium', timeoutSeconds: 300 } +}; + +function basePersonaJson(overrides: Record = {}): Record { + return { + id: 'demo', + intent: 'documentation', + tags: ['documentation'], + description: 'test persona', + tiers: { + best: baseRuntime, + 'best-value': baseRuntime, + minimum: baseRuntime + }, + cloud: true, + schedules: [{ name: 'weekly', cron: '0 9 * * 6' }], + onEvent: './agent.ts', + ...overrides + }; +} + +async function withTempPersona( + persona: Record, + agentSource = 'export default async () => {};' +): Promise<{ dir: string; personaPath: string; cleanup: () => Promise }> { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-deploy-test-')); + const personaPath = path.join(dir, 'persona.json'); + await writeFile(personaPath, JSON.stringify(persona, null, 2), 'utf8'); + await writeFile(path.join(dir, 'agent.ts'), agentSource, 'utf8'); + return { + dir, + personaPath, + cleanup: () => rm(dir, { recursive: true, force: true }) + }; +} + +test('preflightPersona accepts a valid deploy-shaped persona', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson()); + try { + const pre = await preflightPersona(personaPath); + assert.equal(pre.persona.id, 'demo'); + assert.deepEqual(pre.schedules, ['weekly']); + assert.deepEqual(pre.integrations, []); + assert.equal(pre.warnings.length, 0); + } finally { + await cleanup(); + } +}); + +test('preflightPersona refuses when cloud is not true', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson({ cloud: false })); + try { + await assert.rejects(preflightPersona(personaPath), /not opted into deploy/); + } finally { + await cleanup(); + } +}); + +test('preflightPersona refuses when no triggers are declared', async () => { + const { personaPath, cleanup } = await withTempPersona( + basePersonaJson({ schedules: undefined, integrations: undefined }) + ); + try { + await assert.rejects(preflightPersona(personaPath), /has no triggers/); + } finally { + await cleanup(); + } +}); + +test('preflightPersona refuses when onEvent file is missing', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson({ onEvent: './does-not-exist.ts' })); + try { + await assert.rejects(preflightPersona(personaPath), /onEvent file not found/); + } finally { + await cleanup(); + } +}); + +test('preflightPersona warns on unknown triggers but does not fail', async () => { + const { personaPath, cleanup } = await withTempPersona( + basePersonaJson({ + schedules: undefined, + integrations: { + github: { triggers: [{ on: 'pull_request.imagined_event' }] } + } + }) + ); + try { + const pre = await preflightPersona(personaPath); + assert.equal(pre.warnings.length, 1); + assert.match(pre.warnings[0], /pull_request\.imagined_event/); + } finally { + await cleanup(); + } +}); + +test('deploy --dry-run validates persona and exits before side effects', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson()); + const io = createBufferedIO(); + try { + const result = await deploy({ personaPath, dryRun: true, io }); + assert.equal(result.deploymentId, 'demo'); + assert.deepEqual(result.schedules, ['weekly']); + assert.ok(io.messages.find((m) => m.message.includes('--dry-run'))); + // No workspace resolution happened. + assert.ok(!io.messages.find((m) => m.message.startsWith('workspace:'))); + } finally { + await cleanup(); + } +}); + +test('deploy fails clearly when integration is not connected and --no-connect is set', async () => { + const { personaPath, cleanup } = await withTempPersona( + basePersonaJson({ integrations: { github: { triggers: [{ on: 'pull_request.opened' }] } } }) + ); + const io = createBufferedIO(); + const workspaceAuth: WorkspaceAuth = { + async resolveWorkspace() { + return { workspace: 'ws-test', token: 'tok' }; + } + }; + const integrations: IntegrationConnectResolver = { + async isConnected() { + return false; + }, + async connect() { + throw new Error('should not be called when --no-connect is set'); + } + }; + try { + await assert.rejects( + deploy( + { personaPath, mode: 'dev', noConnect: true, io }, + { workspaceAuth, integrations } + ), + /failed to connect/ + ); + assert.ok( + io.messages.find( + (m) => m.level === 'error' && m.message.includes('--no-connect was passed') + ) + ); + } finally { + await cleanup(); + } +}); + +test('deploy stages a bundle and hands off to the resolved launcher', async () => { + const { personaPath, dir, cleanup } = await withTempPersona(basePersonaJson()); + const io = createBufferedIO(); + let stagedTo = ''; + const bundleStager: BundleStager = { + async stage(input) { + stagedTo = input.outDir; + await mkdir(input.outDir, { recursive: true }); + const runner = path.join(input.outDir, 'runner.mjs'); + const bundle = path.join(input.outDir, 'agent.bundle.mjs'); + const personaCopy = path.join(input.outDir, 'persona.json'); + const pkg = path.join(input.outDir, 'package.json'); + await Promise.all([ + writeFile(runner, '', 'utf8'), + writeFile(bundle, '', 'utf8'), + writeFile(personaCopy, '{}', 'utf8'), + writeFile(pkg, '{}', 'utf8') + ]); + return { + runnerPath: runner, + bundlePath: bundle, + personaCopyPath: personaCopy, + packageJsonPath: pkg, + sizeBytes: 2 + }; + } + }; + + let launched = 0; + const devLauncher: ModeLauncher = { + async launch(input) { + launched += 1; + assert.equal(input.persona.id, 'demo'); + return { + id: 'pid-1', + async stop() { + /* no-op */ + }, + done: Promise.resolve({ code: 0 }) + }; + } + }; + + const workspaceAuth: WorkspaceAuth = { + async resolveWorkspace() { + return { workspace: 'ws-test', token: 'tok' }; + } + }; + const integrations: IntegrationConnectResolver = { + async isConnected() { + return true; + }, + async connect() { + throw new Error('connect should not be called when everything is already connected'); + } + }; + + try { + const result = await deploy( + { personaPath, mode: 'dev', io }, + { workspaceAuth, integrations, bundle: bundleStager, modes: { dev: devLauncher } } + ); + assert.equal(launched, 1); + assert.equal(result.mode, 'dev'); + assert.equal(result.workspace, 'ws-test'); + assert.ok(result.bundleDir.startsWith(dir)); + assert.equal(stagedTo, result.bundleDir); + assert.ok(io.messages.find((m) => m.message.includes('launched: dev/pid-1'))); + } finally { + await cleanup(); + } +}); + +test('deploy --bundle-out emits to the supplied dir and skips launch', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson()); + const outDir = await mkdtemp(path.join(os.tmpdir(), 'wf-deploy-out-')); + const io = createBufferedIO(); + + let launched = false; + const devLauncher: ModeLauncher = { + async launch() { + launched = true; + throw new Error('launch should not run with --bundle-out'); + } + }; + const bundleStager: BundleStager = { + async stage(input) { + await mkdir(input.outDir, { recursive: true }); + const runner = path.join(input.outDir, 'runner.mjs'); + const bundle = path.join(input.outDir, 'agent.bundle.mjs'); + const personaCopy = path.join(input.outDir, 'persona.json'); + const pkg = path.join(input.outDir, 'package.json'); + await Promise.all([ + writeFile(runner, '', 'utf8'), + writeFile(bundle, '', 'utf8'), + writeFile(personaCopy, '{}', 'utf8'), + writeFile(pkg, '{}', 'utf8') + ]); + return { + runnerPath: runner, + bundlePath: bundle, + personaCopyPath: personaCopy, + packageJsonPath: pkg, + sizeBytes: 1 + }; + } + }; + + try { + const result = await deploy( + { personaPath, mode: 'dev', io, bundleOut: outDir }, + { + workspaceAuth: { + async resolveWorkspace() { + return { workspace: 'w', token: 't' }; + } + }, + integrations: { + async isConnected() { + return true; + }, + async connect() { + throw new Error('unreachable'); + } + }, + bundle: bundleStager, + modes: { dev: devLauncher } + } + ); + assert.equal(launched, false); + assert.equal(result.bundleDir, path.resolve(outDir)); + assert.ok(io.messages.find((m) => m.message.includes('skipping launch'))); + } finally { + await cleanup(); + await rm(outDir, { recursive: true, force: true }); + } +}); + +test('--mode cloud throws a clear "not yet available" error', async () => { + const { personaPath, cleanup } = await withTempPersona(basePersonaJson()); + const io = createBufferedIO(); + try { + await assert.rejects( + deploy( + { personaPath, mode: 'cloud', io }, + { + workspaceAuth: { + async resolveWorkspace() { + return { workspace: 'w', token: 't' }; + } + }, + integrations: { + async isConnected() { + return true; + }, + async connect() { + throw new Error('unreachable'); + } + }, + bundle: { + async stage() { + return { + runnerPath: '/tmp/r', + bundlePath: '/tmp/b', + personaCopyPath: '/tmp/p', + packageJsonPath: '/tmp/k', + sizeBytes: 0 + }; + } + } + } + ), + /--mode cloud is not yet available/ + ); + } finally { + await cleanup(); + } +}); diff --git a/packages/deploy/src/deploy.ts b/packages/deploy/src/deploy.ts new file mode 100644 index 0000000..714dc19 --- /dev/null +++ b/packages/deploy/src/deploy.ts @@ -0,0 +1,215 @@ +import path from 'node:path'; +import { mkdir } from 'node:fs/promises'; +import { bundleStager } from './bundle.js'; +import { + connectIntegrations, + envIntegrationResolver, + type IntegrationConnectResolver, + type ProviderSubscriptionResolver +} from './connect.js'; +import { createTerminalIO } from './io.js'; +import { envWorkspaceAuth, type WorkspaceAuth } from './login.js'; +import { devLauncher } from './modes/dev.js'; +import { sandboxLauncher } from './modes/sandbox.js'; +import { cloudLauncher } from './modes/cloud.js'; +import { preflightPersona } from './preflight.js'; +import type { + BundleStager, + DeployMode, + DeployOptions, + DeployResult, + ModeLauncher +} from './types.js'; + +/** + * External-resolver bundle the orchestrator depends on. Each field has a + * real default backed by env (or, for `bundle`/`modes`, the real + * launchers). Callers override individual fields to plug in higher-level + * implementations: a CLI dispatch case may pass an `IntegrationResolver` + * backed by `@relayfile/sdk`'s OAuth flow once it is available, tests + * pass deterministic in-memory fakes. + */ +export interface DeployResolvers { + workspaceAuth?: WorkspaceAuth; + integrations?: IntegrationConnectResolver; + subscription?: ProviderSubscriptionResolver; + bundle?: BundleStager; + modes?: Partial>; +} + +/** + * Pick the run mode for this deploy. Per the deploy-v1 spec: + * - Explicit `--mode` always wins. + * - Otherwise `--mode sandbox` is the default when Daytona creds resolve + * (BYO env or workforce-managed both count as "resolved" here; the + * sandbox launcher itself decides which auth path to use). + * - Otherwise fall back to `--mode dev`. + * + * The orchestrator doesn't probe the cloud endpoint here — `--mode cloud` + * stays opt-in until the M4 endpoint is live. + */ +export function pickMode(opts: DeployOptions): DeployMode { + if (opts.mode) return opts.mode; + // Daytona credential probe: BYO env var, or assume workforce-managed via + // the active workspace (the sandbox launcher gates on its own auth). + if (process.env.DAYTONA_API_KEY || process.env.WORKFORCE_WORKSPACE_TOKEN) { + return 'sandbox'; + } + return 'dev'; +} + +/** + * Top-level entry. The CLI dispatch case calls this with parsed options + * and resolvers. Returns a `DeployResult` summarizing the deploy; on + * failure, throws with an actionable message (no half-deploys). + * + * Step ordering — see `docs/plans/deploy-v1.md` §5: + * 1. Preflight persona (parse, lint, onEvent on disk). + * 2. Resolve workspace + token. + * 3. Connect integrations (prompt per provider). + * 4. Stage bundle to `.workforce/build//`. + * 5. Launch in the resolved mode. + * 6. Return the handle + summary. + * + * `dryRun: true` exits cleanly after step 1, returning a minimal result + * with the warnings collected so far. + * + * `bundleOut: ` runs steps 1-4 then exits, skipping launch. + */ +export async function deploy(opts: DeployOptions, resolvers: DeployResolvers = {}): Promise { + const io = opts.io ?? createTerminalIO(); + const warnings: string[] = []; + + io.info(`workforce deploy → ${opts.personaPath}`); + + const preflight = await preflightPersona(opts.personaPath); + warnings.push(...preflight.warnings); + for (const w of preflight.warnings) io.warn(w); + + io.info( + `persona ${preflight.persona.id}: ${preflight.integrations.length} integration(s), ${preflight.schedules.length} schedule(s)` + ); + + if (opts.dryRun) { + io.info('--dry-run: persona validated; exiting before any side effects'); + return { + deploymentId: preflight.persona.id, + mode: opts.mode ?? pickMode(opts), + workspace: opts.workspace ?? '(dry-run)', + bundleDir: '(dry-run)', + connectedIntegrations: [], + schedules: preflight.schedules, + warnings + }; + } + + // `--bundle-out` produces a workspace-agnostic artifact, so skip the + // workspace/integration handshakes entirely. Anyone bundling for CI + // or inspection shouldn't need credentials they don't yet have. + if (opts.bundleOut) { + const bundleDir = path.resolve(opts.bundleOut); + await mkdir(bundleDir, { recursive: true }); + const stager = resolvers.bundle ?? bundleStager; + const bundle = await stager.stage({ + personaPath: preflight.personaPath, + persona: preflight.persona, + outDir: bundleDir + }); + io.info(`bundle: staged to ${bundle.runnerPath} (${formatBytes(bundle.sizeBytes)})`); + io.info(`--bundle-out: bundle ready at ${bundleDir}; skipping launch`); + return { + deploymentId: preflight.persona.id, + mode: opts.mode ?? pickMode(opts), + workspace: opts.workspace ?? '(bundle-only)', + bundleDir, + connectedIntegrations: [], + schedules: preflight.schedules, + warnings + }; + } + + const workspaceAuth = resolvers.workspaceAuth ?? envWorkspaceAuth(); + const { workspace } = await workspaceAuth.resolveWorkspace({ + override: opts.workspace, + io + }); + io.info(`workspace: ${workspace}`); + + const connectResult = await connectIntegrations({ + persona: preflight.persona, + workspace, + noConnect: opts.noConnect === true, + io, + integrations: resolvers.integrations ?? envIntegrationResolver(), + ...(resolvers.subscription ? { subscription: resolvers.subscription } : {}) + }); + const failed = connectResult.outcomes.filter((o) => o.status === 'failed'); + if (failed.length > 0) { + throw new Error( + `deploy aborted: ${failed.length} integration(s) failed to connect: ${failed.map((f) => f.provider).join(', ')}` + ); + } + const skipped = connectResult.outcomes.filter((o) => o.status === 'skipped'); + if (skipped.length > 0) { + throw new Error( + `deploy aborted: ${skipped.length} integration(s) skipped: ${skipped.map((s) => s.provider).join(', ')}` + ); + } + const connectedIntegrations = connectResult.outcomes + .filter((o) => o.status === 'already-connected' || o.status === 'connected-now') + .map((o) => o.provider); + + const bundleDir = path.resolve( + path.join(preflight.personaDir, '.workforce', 'build', preflight.persona.id) + ); + await mkdir(bundleDir, { recursive: true }); + const stager = resolvers.bundle ?? bundleStager; + const bundle = await stager.stage({ + personaPath: preflight.personaPath, + persona: preflight.persona, + outDir: bundleDir + }); + io.info(`bundle: staged to ${bundle.runnerPath} (${formatBytes(bundle.sizeBytes)})`); + + const mode: DeployMode = opts.mode ?? pickMode(opts); + io.info(`mode: ${mode}`); + const launcher = resolveLauncher(mode, resolvers); + const handle = await launcher.launch({ + persona: preflight.persona, + bundle, + workspace, + io, + ...(opts.detach ? { detach: true } : {}) + }); + io.info(`launched: ${mode}/${handle.id}`); + + return { + deploymentId: preflight.persona.id, + mode, + workspace, + bundleDir, + connectedIntegrations, + schedules: preflight.schedules, + runHandle: handle, + warnings + }; +} + +function resolveLauncher(mode: DeployMode, resolvers: DeployResolvers): ModeLauncher { + const supplied = resolvers.modes?.[mode]; + if (supplied) return supplied; + switch (mode) { + case 'dev': + return devLauncher; + case 'sandbox': + return sandboxLauncher; + case 'cloud': + return cloudLauncher; + } +} + +function formatBytes(bytes: number): string { + if (bytes < 1024) return `${bytes}B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)}KB`; + return `${(bytes / (1024 * 1024)).toFixed(1)}MB`; +} diff --git a/packages/deploy/src/index.ts b/packages/deploy/src/index.ts new file mode 100644 index 0000000..34d2173 --- /dev/null +++ b/packages/deploy/src/index.ts @@ -0,0 +1,31 @@ +export { deploy, pickMode, type DeployResolvers } from './deploy.js'; +export { preflightPersona } from './preflight.js'; +export { + connectIntegrations, + envIntegrationResolver, + type ConnectAllInput, + type ConnectAllResult, + type IntegrationConnectResolver, + type ProviderSubscriptionResolver +} from './connect.js'; +export { envWorkspaceAuth, type WorkspaceAuth } from './login.js'; +export { createTerminalIO, createBufferedIO, type BufferedIO } from './io.js'; +export { bundleStager } from './bundle.js'; +export { devLauncher } from './modes/dev.js'; +export { sandboxLauncher, resolveSandboxAuth, type SandboxAuth } from './modes/sandbox.js'; +export { cloudLauncher } from './modes/cloud.js'; + +export type { + BundleResult, + BundleStageInput, + BundleStager, + DeployIO, + DeployMode, + DeployOptions, + DeployPreflight, + DeployResult, + IntegrationConnectOutcome, + ModeLaunchHandle, + ModeLaunchInput, + ModeLauncher +} from './types.js'; diff --git a/packages/deploy/src/io.ts b/packages/deploy/src/io.ts new file mode 100644 index 0000000..bf6b0c1 --- /dev/null +++ b/packages/deploy/src/io.ts @@ -0,0 +1,84 @@ +import readline from 'node:readline/promises'; +import type { DeployIO } from './types.js'; + +/** + * Default IO implementation backed by the terminal. Writes status to + * stdout/stderr and uses node:readline for interactive prompts. Tests + * supply a deterministic in-memory IO via `DeployOptions.io`. + */ +export function createTerminalIO(): DeployIO { + let rl: readline.Interface | undefined; + + function ensureReadline(): readline.Interface { + if (!rl) { + rl = readline.createInterface({ input: process.stdin, output: process.stdout }); + } + return rl; + } + + return { + info(message: string) { + process.stdout.write(`${message}\n`); + }, + warn(message: string) { + process.stderr.write(`! ${message}\n`); + }, + error(message: string) { + process.stderr.write(`x ${message}\n`); + }, + async prompt(question, opts = {}) { + const suffix = opts.defaultValue !== undefined ? ` [${opts.defaultValue}]` : ''; + const answer = (await ensureReadline().question(`${question}${suffix} `)).trim(); + return answer.length > 0 ? answer : opts.defaultValue ?? ''; + }, + async confirm(question, opts = {}) { + const def = opts.defaultValue ?? false; + const suffix = def ? ' [Y/n]' : ' [y/N]'; + const answer = (await ensureReadline().question(`${question}${suffix} `)).trim().toLowerCase(); + if (answer === '') return def; + return answer === 'y' || answer === 'yes'; + } + }; +} + +/** + * Buffered IO collected for assertions. Used by tests to verify the + * orchestrator's message flow without touching the terminal. + */ +export interface BufferedIO extends DeployIO { + messages: Array<{ level: 'info' | 'warn' | 'error'; message: string }>; + /** Queue answers for upcoming prompts; FIFO. */ + scriptAnswers(values: string[]): void; + scriptConfirmations(values: boolean[]): void; +} + +export function createBufferedIO(): BufferedIO { + const messages: BufferedIO['messages'] = []; + const answerQueue: string[] = []; + const confirmQueue: boolean[] = []; + return { + messages, + info(message) { + messages.push({ level: 'info', message }); + }, + warn(message) { + messages.push({ level: 'warn', message }); + }, + error(message) { + messages.push({ level: 'error', message }); + }, + async prompt(_question, opts) { + return answerQueue.shift() ?? opts?.defaultValue ?? ''; + }, + async confirm(_question, opts) { + const next = confirmQueue.shift(); + return next ?? opts?.defaultValue ?? false; + }, + scriptAnswers(values) { + answerQueue.push(...values); + }, + scriptConfirmations(values) { + confirmQueue.push(...values); + } + }; +} diff --git a/packages/deploy/src/login.ts b/packages/deploy/src/login.ts new file mode 100644 index 0000000..71d4e66 --- /dev/null +++ b/packages/deploy/src/login.ts @@ -0,0 +1,42 @@ +import type { DeployIO } from './types.js'; + +/** + * Workspace authentication primitives. The CLI layer plugs in real + * implementations that talk to relayauth + the workforce cloud API; the + * deploy package itself stays SDK-free so the contract is easy to mock. + */ +export interface WorkspaceAuth { + /** Resolve the active workspace, prompting the user to pick one if needed. */ + resolveWorkspace(args: { override?: string; io: DeployIO }): Promise<{ + workspace: string; + /** Workspace-scoped token usable for gateway + cloud API calls. */ + token: string; + }>; +} + +/** + * Environment-backed fallback resolver: reads `WORKFORCE_WORKSPACE_ID` + * and `WORKFORCE_WORKSPACE_TOKEN` from `process.env`. Useful in CI and as + * a sane default before the CLI wires up the OAuth flow. + */ +export function envWorkspaceAuth(): WorkspaceAuth { + return { + async resolveWorkspace({ override, io }) { + const workspace = override ?? process.env.WORKFORCE_WORKSPACE_ID; + const token = process.env.WORKFORCE_WORKSPACE_TOKEN; + if (!workspace) { + io.error( + 'no workspace resolved: pass --workspace, set WORKFORCE_WORKSPACE_ID, or run `workforce login`' + ); + throw new Error('workspace is required for deploy'); + } + if (!token) { + io.error( + 'no workspace token resolved: set WORKFORCE_WORKSPACE_TOKEN, or run `workforce login` to mint one' + ); + throw new Error('workspace token is required for deploy'); + } + return { workspace, token }; + } + }; +} diff --git a/packages/deploy/src/modes/cloud.ts b/packages/deploy/src/modes/cloud.ts new file mode 100644 index 0000000..f416759 --- /dev/null +++ b/packages/deploy/src/modes/cloud.ts @@ -0,0 +1,28 @@ +import type { + ModeLaunchInput, + ModeLaunchHandle, + ModeLauncher +} from '../types.js'; + +/** + * Workforce-cloud-hosted deploy mode. Uploads the bundle to the workforce + * cloud deployments endpoint and lets the cloud runtime host the agent. + * + * The endpoint (`POST /api/v1/workspaces/:id/deployments`) is part of the + * proactive-runtime backend roadmap and is not yet live. Until it is, + * `--mode cloud` returns a clean error that points users at the working + * modes (`--mode sandbox` and `--mode dev`). + * + * When the endpoint ships, the implementation flow is: + * 1. POST persona.json + agent.bundle.mjs + runner.mjs as multipart. + * 2. Receive `{ deploymentId, statusUrl }`. + * 3. Poll `statusUrl` until the cloud reports `running`. + * 4. Return a handle whose `stop()` calls DELETE on the deployment. + */ +export const cloudLauncher: ModeLauncher = { + async launch(_input: ModeLaunchInput): Promise { + throw new Error( + '--mode cloud is not yet available: the workforce cloud deployments endpoint is in progress. Use --mode sandbox (Daytona) or --mode dev (local) today.' + ); + } +}; diff --git a/packages/deploy/src/modes/dev.ts b/packages/deploy/src/modes/dev.ts new file mode 100644 index 0000000..0a1b2ce --- /dev/null +++ b/packages/deploy/src/modes/dev.ts @@ -0,0 +1,166 @@ +import { spawn } from 'node:child_process'; +import { createRequire } from 'node:module'; +import { mkdir, rm, symlink } from 'node:fs/promises'; +import path from 'node:path'; +import type { Readable } from 'node:stream'; +import type { + ModeLaunchInput, + ModeLaunchHandle, + ModeLauncher +} from '../types.js'; + +const SIGTERM_TO_SIGKILL_MS = 5_000; + +const RUNTIME_PACKAGES = ['@agentworkforce/runtime', '@agentworkforce/persona-kit'] as const; + +/** + * Local dev-mode launcher. Spawns `node ` as a child + * process, forwards line-buffered stdout/stderr through the supplied + * DeployIO, and resolves `done` when the child exits. + * + * `stop()` sends SIGTERM and escalates to SIGKILL after 5s if the child + * hasn't exited cleanly. The parent's SIGINT/SIGTERM are forwarded too + * so Ctrl-C in `--mode dev` produces an orderly shutdown. + */ +export const devLauncher: ModeLauncher = { + async launch(input: ModeLaunchInput): Promise { + const runnerPath = input.bundle.runnerPath; + const cwd = path.dirname(runnerPath); + + // The generated runner imports `@agentworkforce/runtime`. In dev + // mode we resolve the package out of the parent workforce install + // and symlink it into the bundle's local node_modules so node's + // ESM resolver finds it without an `npm install` step. The link is + // idempotent: stale links are replaced on every launch. + await linkRuntimePackages(cwd); + + const env: NodeJS.ProcessEnv = { + ...process.env, + ...(input.env ?? {}), + WORKFORCE_WORKSPACE_ID: input.workspace, + WORKFORCE_PERSONA_ID: input.persona.id + }; + + const child = spawn(process.execPath, [runnerPath], { + cwd, + env, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + // Bridge the parent process's stdin into the child runner. The + // runner reads NDJSON envelopes from its stdin, so any envelopes the + // user pipes into `workforce deploy --mode dev` flow straight into + // the runner without an intermediate file. + if (child.stdin) { + process.stdin.pipe(child.stdin); + // When the parent's stdin closes (EOF / piped input drained), end + // the child's stdin too so the runner's for-await loop terminates. + process.stdin.once('end', () => { + child.stdin?.end(); + }); + } + + if (child.pid === undefined) { + throw new Error('dev launcher: failed to spawn runner (no pid assigned)'); + } + + forwardLines(child.stdout, (line) => input.io.info(`[runtime] ${line}`)); + forwardLines(child.stderr, (line) => input.io.warn(`[runtime] ${line}`)); + + const done = new Promise<{ code: number }>((resolveDone) => { + child.once('exit', (code, signal) => { + const exitCode = typeof code === 'number' ? code : signal ? signalExit(signal) : 0; + resolveDone({ code: exitCode }); + }); + }); + + let stopping = false; + const stop = async (): Promise => { + if (stopping) { + await done; + return; + } + stopping = true; + child.stdin?.end(); + child.kill('SIGTERM'); + const escalation = setTimeout(() => { + if (!child.killed) child.kill('SIGKILL'); + }, SIGTERM_TO_SIGKILL_MS).unref(); + try { + await done; + } finally { + clearTimeout(escalation); + } + }; + + // Bridge parent-process signal handlers so Ctrl-C in --mode dev + // produces a clean child shutdown (and not an orphaned runner). + const forwardParentSignal = (signal: NodeJS.Signals) => { + void stop().catch(() => { + /* stop already drained */ + }); + process.off('SIGINT', forwardParentSignal as never); + process.off('SIGTERM', forwardParentSignal as never); + // Re-raise so the parent's normal exit semantics take over after + // the child closes down. + process.kill(process.pid, signal); + }; + process.once('SIGINT', forwardParentSignal); + process.once('SIGTERM', forwardParentSignal); + + return { + id: `pid:${child.pid}`, + stop, + done + }; + } +}; + +function forwardLines(stream: Readable, write: (line: string) => void): void { + let buffered = ''; + stream.setEncoding('utf8'); + stream.on('data', (chunk: string) => { + buffered += chunk; + let nl = buffered.indexOf('\n'); + while (nl !== -1) { + const line = buffered.slice(0, nl).replace(/\r$/, ''); + buffered = buffered.slice(nl + 1); + if (line.length > 0) write(line); + nl = buffered.indexOf('\n'); + } + }); + stream.on('end', () => { + const tail = buffered.trim(); + if (tail.length > 0) write(tail); + }); +} + +async function linkRuntimePackages(bundleDir: string): Promise { + const nodeModulesDir = path.join(bundleDir, 'node_modules'); + const scopeDir = path.join(nodeModulesDir, '@agentworkforce'); + await mkdir(scopeDir, { recursive: true }); + + // Resolve each package's installed root by asking node where its + // `package.json` lives, then symlink that root into our bundle's + // node_modules. Using `require.resolve` guarantees we point at the + // package the deploy package itself imports — no env var dance. + const localRequire = createRequire(import.meta.url); + for (const pkg of RUNTIME_PACKAGES) { + const manifestPath = localRequire.resolve(`${pkg}/package.json`); + const packageRoot = path.dirname(manifestPath); + const linkPath = path.join(scopeDir, pkg.slice('@agentworkforce/'.length)); + await rm(linkPath, { recursive: true, force: true }); + await symlink(packageRoot, linkPath, 'dir'); + } +} + +function signalExit(signal: NodeJS.Signals): number { + // Match the POSIX convention for terminated children. + const SIGNAL_MAP: Partial> = { + SIGINT: 130, + SIGTERM: 143, + SIGKILL: 137, + SIGHUP: 129 + }; + return SIGNAL_MAP[signal] ?? 1; +} diff --git a/packages/deploy/src/modes/sandbox.ts b/packages/deploy/src/modes/sandbox.ts new file mode 100644 index 0000000..02a5658 --- /dev/null +++ b/packages/deploy/src/modes/sandbox.ts @@ -0,0 +1,175 @@ +import { readFile } from 'node:fs/promises'; +import path from 'node:path'; +import { Daytona, type Sandbox } from '@daytonaio/sdk'; +import type { + ModeLaunchInput, + ModeLaunchHandle, + ModeLauncher +} from '../types.js'; + +/** + * Working directory the runner is invoked from inside the sandbox. Same + * mount root cloud's DaytonaRuntime defaults to, so persona authors get + * consistent paths whether they run under workforce or cloud workflows. + */ +const SANDBOX_BUNDLE_DIR = '/home/daytona/bundle'; + +/** + * Daytona authentication resolved before sandbox creation. Workforce + * supports two paths: + * + * - BYO — `DAYTONA_API_KEY` (+ optional `DAYTONA_ORGANIZATION_ID`) + * in the user's env. Zero workforce-cloud roundtrips. + * - Workforce-managed — a workspace token mints a Daytona JWT against + * the workforce cloud API (`POST /workspaces/:id/sandboxes`). + * Lights up once the cloud endpoint ships. + */ +export interface SandboxAuth { + apiKey?: string; + jwtToken?: string; + organizationId?: string; +} + +export function resolveSandboxAuth(): SandboxAuth | undefined { + const apiKey = process.env.DAYTONA_API_KEY; + const jwtToken = process.env.DAYTONA_JWT_TOKEN; + const organizationId = process.env.DAYTONA_ORGANIZATION_ID; + if (!apiKey && !jwtToken) return undefined; + return { + ...(apiKey ? { apiKey } : {}), + ...(jwtToken ? { jwtToken } : {}), + ...(organizationId ? { organizationId } : {}) + }; +} + +/** + * Daytona-backed sandbox launcher. Creates a TypeScript sandbox, uploads + * the bundle, and starts the runner with a long timeout (effectively + * unlimited for cron-driven agents). The sandbox stays alive after the + * exec call returns so subsequent envelopes the runner expects on stdin + * have a place to land — see `stop()` for the explicit teardown contract. + * + * Streaming: Daytona's `executeCommand` is final-result-only. The runner + * exits when its envelope stream ends, and the resulting stdout/stderr + * blob is forwarded to the DeployIO at that point. Live tail support + * would require `process.createSession`, which is gated on a future + * iteration. + */ +export const sandboxLauncher: ModeLauncher = { + async launch(input: ModeLaunchInput): Promise { + const auth = resolveSandboxAuth(); + if (!auth) { + throw new Error( + 'sandbox launcher: no Daytona credentials resolved. Either export DAYTONA_API_KEY (BYO) or run `workforce login` to mint a workforce-managed Daytona token.' + ); + } + + const daytona = new Daytona(auth); + const sandbox = await daytona.create({ + language: 'typescript', + name: `wf-${input.persona.id}`, + envVars: { + ...(input.env ?? {}), + WORKFORCE_WORKSPACE_ID: input.workspace, + WORKFORCE_PERSONA_ID: input.persona.id + } + }); + + try { + await uploadBundle(sandbox, input); + } catch (err) { + // If upload fails, the sandbox is unrecoverable for this deploy. + // Tear it down so we don't leak Daytona resources. + await sandbox.delete().catch(() => undefined); + throw err; + } + + const sandboxTimeoutSeconds = resolveTimeoutSeconds(input.persona.sandbox); + + let stopping = false; + let runner: Promise<{ code: number }> | undefined; + + const stop = async (): Promise => { + if (stopping) return; + stopping = true; + try { + await sandbox.delete(); + } catch (err) { + input.io.warn( + `sandbox: cleanup failed: ${err instanceof Error ? err.message : String(err)}` + ); + } + }; + + runner = (async () => { + try { + const result = await sandbox.process.executeCommand( + 'node runner.mjs', + SANDBOX_BUNDLE_DIR, + undefined, + sandboxTimeoutSeconds + ); + const output = (result.result ?? '').trim(); + if (output.length > 0) input.io.info(`[sandbox] ${output}`); + const exitCode = result.exitCode ?? 0; + return { code: exitCode }; + } catch (err) { + if (!stopping) { + input.io.error( + `sandbox: runner exec failed: ${err instanceof Error ? err.message : String(err)}` + ); + } + return { code: 1 }; + } + })(); + + return { + id: `sandbox:${sandbox.id}`, + stop, + done: runner + }; + } +}; + +async function uploadBundle(sandbox: Sandbox, input: ModeLaunchInput): Promise { + // Bundle artifacts are tiny (KB-range), so reading them into Buffers + // before upload is the simplest correct shape. If/when bundles grow to + // the MB range we revisit streaming. + const files = await Promise.all([ + fileUpload(input.bundle.runnerPath, `${SANDBOX_BUNDLE_DIR}/runner.mjs`), + fileUpload(input.bundle.bundlePath, `${SANDBOX_BUNDLE_DIR}/agent.bundle.mjs`), + fileUpload(input.bundle.personaCopyPath, `${SANDBOX_BUNDLE_DIR}/persona.json`), + fileUpload(input.bundle.packageJsonPath, `${SANDBOX_BUNDLE_DIR}/package.json`) + ]); + await sandbox.fs.uploadFiles(files); + + // The bundle's package.json declares `@agentworkforce/runtime` as a + // dependency. The sandbox starts from a clean tsx baseline, so we + // resolve the runtime via `npm install` before the runner can import + // it. Install runs once per sandbox lifetime; long-lived agents + // pay the cost only at cold-start. + const install = await sandbox.process.executeCommand( + 'npm install --prefer-offline --no-audit --no-fund --loglevel=error @agentworkforce/runtime@latest', + SANDBOX_BUNDLE_DIR, + undefined, + 600 + ); + if ((install.exitCode ?? 0) !== 0) { + throw new Error( + `sandbox: npm install failed (exit ${install.exitCode}): ${install.result?.slice(0, 400) ?? ''}` + ); + } +} + +async function fileUpload(localPath: string, remotePath: string): Promise<{ source: Buffer; destination: string }> { + const source = await readFile(localPath); + return { source, destination: remotePath }; +} + +function resolveTimeoutSeconds(sandbox: ModeLaunchInput['persona']['sandbox']): number | undefined { + if (sandbox === undefined || sandbox === true || sandbox === false) return undefined; + if (typeof sandbox.timeoutSeconds === 'number' && sandbox.timeoutSeconds > 0) { + return sandbox.timeoutSeconds; + } + return undefined; +} diff --git a/packages/deploy/src/preflight.ts b/packages/deploy/src/preflight.ts new file mode 100644 index 0000000..01389ba --- /dev/null +++ b/packages/deploy/src/preflight.ts @@ -0,0 +1,107 @@ +import { readFile, stat } from 'node:fs/promises'; +import path from 'node:path'; +import { + lintTriggers, + parsePersonaSpec, + type PersonaIntent, + type PersonaSpec +} from '@agentworkforce/persona-kit'; +import type { DeployPreflight } from './types.js'; + +/** + * Load + parse + validate a persona for the deploy surface. Returns the + * frozen-shape preflight on success, throws with a field-pointed error + * on validation failure. + * + * Deploy preflight is stricter than the persona-kit parser: the parser + * accepts any persona, valid or not for deploy; this function enforces + * the deploy-specific cross-field rules (cloud:true, onEvent present when + * triggers exist, onEvent file actually on disk, etc.) so the orchestrator + * never gets a half-valid spec. + */ +export async function preflightPersona(personaPath: string): Promise { + const absPath = path.resolve(personaPath); + const personaDir = path.dirname(absPath); + + const raw = await readFile(absPath, 'utf8').catch((err: NodeJS.ErrnoException) => { + if (err.code === 'ENOENT') { + throw new Error(`persona JSON not found at ${absPath}`); + } + throw err; + }); + + let json: unknown; + try { + json = JSON.parse(raw); + } catch (err) { + throw new Error( + `persona JSON at ${absPath} is not valid JSON: ${err instanceof Error ? err.message : String(err)}` + ); + } + + if (typeof json !== 'object' || json === null) { + throw new Error(`persona JSON at ${absPath} must be a top-level object`); + } + + // The persona-kit parser is intent-aware; we pass the intent it declares + // back to itself so the check is self-consistent (parsePersonaSpec + // enforces that `intent` matches `expectedIntent` to catch type-collated + // mistakes in built-in catalogs). For loose deploy use, mirror the + // declared intent. + const declaredIntent = (json as { intent?: unknown }).intent; + if (typeof declaredIntent !== 'string' || !declaredIntent) { + throw new Error(`persona JSON at ${absPath} is missing top-level "intent"`); + } + + const persona: PersonaSpec = parsePersonaSpec(json, declaredIntent as PersonaIntent); + + if (persona.cloud !== true) { + throw new Error( + `persona "${persona.id}" is not opted into deploy (set "cloud": true to enable workforce deploy)` + ); + } + + const hasIntegrationTriggers = !!persona.integrations && + Object.values(persona.integrations).some((cfg) => (cfg.triggers?.length ?? 0) > 0); + const hasSchedules = (persona.schedules?.length ?? 0) > 0; + + if (!hasIntegrationTriggers && !hasSchedules) { + throw new Error( + `persona "${persona.id}" declares cloud:true but has no triggers (add at least one schedule or integration trigger)` + ); + } + + if (!persona.onEvent) { + throw new Error( + `persona "${persona.id}" declares cloud:true but is missing "onEvent" (path to the handler file)` + ); + } + + const onEventPath = path.resolve(personaDir, persona.onEvent); + const onEventStat = await stat(onEventPath).catch((err: NodeJS.ErrnoException) => { + if (err.code === 'ENOENT') { + throw new Error( + `persona "${persona.id}" onEvent file not found at ${onEventPath} (relative to ${personaDir})` + ); + } + throw err; + }); + if (!onEventStat.isFile()) { + throw new Error(`onEvent path ${onEventPath} is not a regular file`); + } + + const triggerLint = lintTriggers(persona); + const warnings = triggerLint.map( + (issue) => `${issue.path}: ${issue.message}` + ); + + return { + persona, + personaPath: absPath, + personaDir, + onEventPath, + schedules: (persona.schedules ?? []).map((s) => s.name), + integrations: persona.integrations ? Object.keys(persona.integrations) : [], + warnings + }; +} diff --git a/packages/deploy/src/types.ts b/packages/deploy/src/types.ts new file mode 100644 index 0000000..7277d3f --- /dev/null +++ b/packages/deploy/src/types.ts @@ -0,0 +1,138 @@ +import type { PersonaSpec } from '@agentworkforce/persona-kit'; + +export type DeployMode = 'dev' | 'sandbox' | 'cloud'; + +export interface DeployOptions { + /** Absolute path to the persona JSON file. Required. */ + personaPath: string; + /** Run mode. Defaults to `sandbox` if Daytona creds resolve, else `dev`. */ + mode?: DeployMode; + /** Workforce workspace to deploy into. Defaults to the active workspace. */ + workspace?: string; + /** Skip the integration-connect prompts; fail if any declared integration is missing. */ + noConnect?: boolean; + /** Force BYO Daytona even when workforce-managed sandbox issuance is available. */ + byoSandbox?: boolean; + /** Background the runner instead of streaming logs in the foreground. */ + detach?: boolean; + /** Emit the bundle to this directory and exit (no launch). */ + bundleOut?: string; + /** Validate-only: parse + lint + check connection status, no side effects. */ + dryRun?: boolean; + /** Override the WORKFORCE_CLOUD_URL; defaults to env or production. */ + cloudUrl?: string; + /** Override stdout writer for tests + structured outputs. */ + io?: DeployIO; +} + +export interface DeployIO { + /** User-facing progress line (clean prose, suitable for terminals). */ + info(message: string): void; + /** Warning line; rendered with a marker the user reads. */ + warn(message: string): void; + /** Error line; non-fatal context. */ + error(message: string): void; + /** Interactive prompt; resolves to user's answer. */ + prompt(question: string, opts?: { defaultValue?: string }): Promise; + /** Confirmation prompt; resolves to true/false. */ + confirm(question: string, opts?: { defaultValue?: boolean }): Promise; +} + +/** The result returned by a successful `deploy(...)` call. */ +export interface DeployResult { + /** Resolved deployment identifier (stable across restarts of the same persona). */ + deploymentId: string; + /** Which run mode the orchestrator picked. */ + mode: DeployMode; + /** Workspace the deployment is scoped to. */ + workspace: string; + /** Path to the staged bundle on disk. */ + bundleDir: string; + /** Integrations that were connected (or already-connected) as part of this deploy. */ + connectedIntegrations: string[]; + /** Schedules registered with the runtime. */ + schedules: string[]; + /** Run-mode-specific handle. `dev` returns a child process handle; `sandbox` a Daytona sandbox id; `cloud` a server-side deployment id. */ + runHandle?: unknown; + /** Non-fatal warnings collected during deploy. */ + warnings: string[]; +} + +/** + * Contract the deploy orchestrator expects from the bundle stager. The + * default implementation lives in `bundle.ts` (esbuild-driven); callers + * pass an alternative via `DeployResolvers.bundle` to swap bundlers or + * inject test fakes. + */ +export interface BundleStager { + stage(input: BundleStageInput): Promise; +} + +export interface BundleStageInput { + personaPath: string; + persona: PersonaSpec; + outDir: string; + bundlerOptions?: { minify?: boolean }; +} + +export interface BundleResult { + personaCopyPath: string; + runnerPath: string; + bundlePath: string; + packageJsonPath: string; + sizeBytes: number; +} + +/** + * Contract each run-mode launcher implements. The defaults live next + * to this file: `modes/dev.ts` (local child_process), `modes/sandbox.ts` + * (Daytona), and `modes/cloud.ts` (workforce-cloud hosted, opt-in once + * the cloud deployments endpoint ships). Callers swap individual modes + * via `DeployResolvers.modes` — useful for tests and custom runtimes. + */ +export interface ModeLauncher { + launch(input: ModeLaunchInput): Promise; +} + +export interface ModeLaunchInput { + persona: PersonaSpec; + bundle: BundleResult; + workspace: string; + env?: Record; + io: DeployIO; + detach?: boolean; +} + +export interface ModeLaunchHandle { + /** Mode-specific identifier (pid for dev, sandboxId for sandbox, deploymentId for cloud). */ + id: string; + /** Stop the runner cleanly. */ + stop(): Promise; + /** + * Resolves when the runner exits. For long-lived modes (sandbox), this + * resolves only when the user invokes `stop()`. + */ + done: Promise<{ code: number }>; +} + +export interface IntegrationConnectOutcome { + provider: string; + status: 'already-connected' | 'connected-now' | 'skipped' | 'failed'; + message?: string; +} + +/** Surface a parsed persona only after we know it passed the deploy preflight. */ +export interface DeployPreflight { + persona: PersonaSpec; + /** Persona JSON path resolved to absolute. */ + personaPath: string; + /** Absolute path to the directory containing the persona JSON. */ + personaDir: string; + /** Absolute path to the resolved `onEvent` file. */ + onEventPath: string; + /** Schedules and integrations summarized. */ + schedules: string[]; + integrations: string[]; + /** Non-fatal warnings (unknown triggers, etc). */ + warnings: string[]; +} diff --git a/packages/deploy/tsconfig.json b/packages/deploy/tsconfig.json new file mode 100644 index 0000000..df59da5 --- /dev/null +++ b/packages/deploy/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist" + }, + "include": ["src/**/*.ts"] +} diff --git a/packages/persona-kit/package.json b/packages/persona-kit/package.json index d7bbe8c..74eaff4 100644 --- a/packages/persona-kit/package.json +++ b/packages/persona-kit/package.json @@ -9,7 +9,8 @@ ".": { "types": "./dist/index.d.ts", "default": "./dist/index.js" - } + }, + "./package.json": "./package.json" }, "files": [ "dist", diff --git a/packages/persona-kit/src/index.ts b/packages/persona-kit/src/index.ts index 0fbc4c4..d02209b 100644 --- a/packages/persona-kit/src/index.ts +++ b/packages/persona-kit/src/index.ts @@ -25,15 +25,24 @@ export type { PersonaContext, PersonaInputSpec, PersonaInstallContext, + PersonaIntegrationConfig, + PersonaIntegrationTrigger, PersonaIntent, + PersonaMemory, + PersonaMemoryConfig, + PersonaMemoryScope, PersonaMount, PersonaPermissions, PersonaRuntime, + PersonaSandbox, + PersonaSandboxConfig, + PersonaSchedule, PersonaSelection, PersonaSkill, PersonaSpec, PersonaTag, PersonaTier, + PersonaTraits, SidecarMdMode, SkillInstall, SkillMaterializationOptions, @@ -55,19 +64,37 @@ export { isTier, parseHarnessSettings, parseInputs, + parseIntegrationConfig, + parseIntegrationTrigger, + parseIntegrations, parseMcpServers, + parseMemory, parseMount, + parseOnEvent, parsePermissions, parsePersonaSpec, parseRuntime, + parseSandbox, + parseSchedules, parseSkills, parseStringList, parseStringMap, parseTags, + parseTraits, resolveSidecar, sidecarSelectionFields } from './parse.js'; +// Trigger registry + lint helper +export { + KNOWN_TRIGGERS, + lintTriggers, + type KnownProviderName, + type KnownTriggerName, + type TriggerLintIssue, + type TriggerLintLevel +} from './triggers.js'; + // Skill materialization export { buildCleanupArtifacts, diff --git a/packages/persona-kit/src/parse.test.ts b/packages/persona-kit/src/parse.test.ts index eea7a43..a1b6dc3 100644 --- a/packages/persona-kit/src/parse.test.ts +++ b/packages/persona-kit/src/parse.test.ts @@ -5,15 +5,21 @@ import { assertSidecarPath, INPUT_NAME_RE, parseHarnessSettings, + parseIntegrations, parseInputs, + parseMemory, parseMcpServers, parseMount, + parseOnEvent, parsePermissions, parsePersonaSpec, + parseSandbox, + parseSchedules, parseSkills, parseStringList, parseStringMap, - parseTags + parseTags, + parseTraits } from './parse.js'; const baseRuntime = { @@ -49,6 +55,39 @@ test('parsePersonaSpec strips unknown top-level fields silently', () => { assert.ok(!('extra' in spec)); }); +test('parsePersonaSpec accepts deploy-v1 optional fields', () => { + const spec = parsePersonaSpec( + validSpec({ + cloud: true, + useSubscription: true, + integrations: { + github: { + scope: { repo: 'AgentWorkforce/workforce' }, + triggers: [{ on: 'pull_request.opened' }] + } + }, + schedules: [{ name: 'weekly', cron: '0 9 * * 6', tz: 'UTC' }], + sandbox: { enabled: true, timeoutSeconds: 1800, env: { NODE_ENV: 'production' } }, + memory: { enabled: true, scopes: ['workspace'], ttlDays: 30 }, + traits: { voice: 'professional-warm', preferMarkdown: true }, + onEvent: './agent.ts' + }), + 'documentation' + ); + + assert.equal(spec.cloud, true); + assert.equal(spec.integrations?.github.triggers?.[0].on, 'pull_request.opened'); + assert.equal(spec.schedules?.[0].name, 'weekly'); + assert.deepEqual(spec.sandbox, { + enabled: true, + timeoutSeconds: 1800, + env: { NODE_ENV: 'production' } + }); + assert.deepEqual(spec.memory, { enabled: true, scopes: ['workspace'], ttlDays: 30 }); + assert.equal(spec.traits?.preferMarkdown, true); + assert.equal(spec.onEvent, './agent.ts'); +}); + test('parsePersonaSpec throws when intent does not match the expected intent', () => { assert.throws( () => parsePersonaSpec(validSpec({ intent: 'review' }), 'documentation'), @@ -304,3 +343,198 @@ test('parsePersonaSpec preserves defaultTier when valid and rejects when invalid /defaultTier must be one of:/ ); }); + +// --- deploy-v1 schema additions ---------------------------------------------- + +test('parseSandbox accepts boolean shorthand and round-trips both forms', () => { + assert.equal(parseSandbox(true, 'sandbox'), true); + assert.equal(parseSandbox(false, 'sandbox'), false); + assert.equal(parseSandbox(undefined, 'sandbox'), undefined); + const obj = parseSandbox( + { enabled: true, timeoutSeconds: 600, env: { FOO: 'bar' } }, + 'sandbox' + ); + assert.deepEqual(obj, { enabled: true, timeoutSeconds: 600, env: { FOO: 'bar' } }); +}); + +test('parseSandbox rejects malformed objects with field-pointed errors', () => { + assert.throws(() => parseSandbox('on', 'sandbox'), /sandbox must be a boolean or an object/); + assert.throws( + () => parseSandbox({ enabled: 'yes' }, 'sandbox'), + /sandbox\.enabled must be a boolean/ + ); + assert.throws( + () => parseSandbox({ timeoutSeconds: -1 }, 'sandbox'), + /sandbox\.timeoutSeconds must be a positive number/ + ); + assert.throws( + () => parseSandbox({ timeoutSeconds: Number.POSITIVE_INFINITY }, 'sandbox'), + /sandbox\.timeoutSeconds must be a positive number/ + ); +}); + +test('parseMemory accepts boolean + object forms and validates scopes', () => { + assert.equal(parseMemory(true, 'memory'), true); + assert.equal(parseMemory(false, 'memory'), false); + assert.equal(parseMemory(undefined, 'memory'), undefined); + const m = parseMemory( + { enabled: true, scopes: ['user', 'user', 'workspace'], ttlDays: 7, autoPromote: true, dedupMs: 0 }, + 'memory' + ); + // Duplicates are deduped while preserving first-seen order. + assert.deepEqual(m, { + enabled: true, + scopes: ['user', 'workspace'], + ttlDays: 7, + autoPromote: true, + dedupMs: 0 + }); +}); + +test('parseMemory rejects unknown scopes and non-positive ttl', () => { + assert.throws( + () => parseMemory({ scopes: ['planet'] }, 'memory'), + /memory\.scopes\[0\] must be one of: session, user, workspace, org, object/ + ); + assert.throws(() => parseMemory({ scopes: [] }, 'memory'), /scopes must be a non-empty array/); + assert.throws(() => parseMemory({ ttlDays: 0 }, 'memory'), /ttlDays must be a positive number/); + assert.throws(() => parseMemory({ dedupMs: -1 }, 'memory'), /dedupMs must be a non-negative number/); +}); + +test('parseTraits keeps only supplied fields and validates enums', () => { + assert.equal(parseTraits(undefined, 'traits'), undefined); + assert.equal(parseTraits({}, 'traits'), undefined); // empty object collapses to undefined + const t = parseTraits( + { + voice: 'concise', + formality: 'low', + proactivity: 'high', + riskPosture: 'balanced', + domain: 'engineering', + vocabulary: ['PR', 'diff'], + preferMarkdown: true + }, + 'traits' + ); + assert.deepEqual(t, { + voice: 'concise', + formality: 'low', + proactivity: 'high', + riskPosture: 'balanced', + domain: 'engineering', + vocabulary: ['PR', 'diff'], + preferMarkdown: true + }); + assert.throws( + () => parseTraits({ formality: 'extreme' }, 'traits'), + /traits\.formality must be one of: low, medium, high/ + ); + assert.throws( + () => parseTraits({ riskPosture: 'wild' }, 'traits'), + /traits\.riskPosture must be one of: conservative, balanced, aggressive/ + ); +}); + +test('parseSchedules validates cron, requires unique names, preserves tz when set', () => { + const s = parseSchedules( + [ + { name: 'morning', cron: '0 9 * * 1-5', tz: 'America/New_York' }, + { name: 'sweep', cron: '*/15 0,12 * * *' } + ], + 'schedules' + ); + assert.deepEqual(s, [ + { name: 'morning', cron: '0 9 * * 1-5', tz: 'America/New_York' }, + { name: 'sweep', cron: '*/15 0,12 * * *' } + ]); + + assert.throws( + () => + parseSchedules( + [ + { name: 'dup', cron: '0 9 * * *' }, + { name: 'dup', cron: '0 10 * * *' } + ], + 'schedules' + ), + /duplicates an earlier schedule/ + ); + assert.throws( + () => parseSchedules([{ name: 'short', cron: '0 9 * *' }], 'schedules'), + /must be a 5-field cron expression/ + ); + assert.throws( + () => parseSchedules([{ name: 'bad', cron: '0 9 * * MON' }], 'schedules'), + /is not a valid cron token/ + ); + assert.equal(parseSchedules(undefined, 'schedules'), undefined); + assert.equal(parseSchedules([], 'schedules'), undefined); +}); + +test('parseIntegrations preserves scope + triggers; rejects empty trigger arrays', () => { + const i = parseIntegrations( + { + github: { + scope: { repo: 'org/r' }, + triggers: [ + { on: 'pull_request.opened' }, + { on: 'issue_comment.created', match: '@mention' } + ] + }, + linear: {} // no scope, no triggers — still a declared integration + }, + 'integrations' + ); + assert.equal(i?.github.scope?.repo, 'org/r'); + assert.equal(i?.github.triggers?.length, 2); + assert.equal(i?.github.triggers?.[1].match, '@mention'); + assert.deepEqual(i?.linear, {}); + + assert.throws( + () => + parseIntegrations( + { github: { triggers: [] } }, + 'integrations' + ), + /triggers must contain at least one entry/ + ); + assert.throws( + () => + parseIntegrations( + { github: { triggers: [{ on: '' }] } }, + 'integrations' + ), + /triggers\[0\]\.on must be a non-empty string/ + ); +}); + +test('parseOnEvent enforces relative path with a supported extension', () => { + assert.equal(parseOnEvent('./agent.ts', 'onEvent'), './agent.ts'); + assert.equal(parseOnEvent('handlers/main.mjs', 'onEvent'), 'handlers/main.mjs'); + assert.equal(parseOnEvent(undefined, 'onEvent'), undefined); + + assert.throws(() => parseOnEvent('/abs/agent.ts', 'onEvent'), /must be a relative POSIX path/); + assert.throws(() => parseOnEvent('a/../b.ts', 'onEvent'), /must not contain ".." segments/); + assert.throws(() => parseOnEvent('agent.py', 'onEvent'), /must point at a \.ts/); + assert.throws(() => parseOnEvent('', 'onEvent'), /must be a non-empty string/); +}); + +test('parsePersonaSpec rejects non-boolean cloud / useSubscription', () => { + assert.throws( + () => parsePersonaSpec(validSpec({ cloud: 'yes' }), 'documentation'), + /cloud must be a boolean/ + ); + assert.throws( + () => parsePersonaSpec(validSpec({ useSubscription: 1 }), 'documentation'), + /useSubscription must be a boolean/ + ); +}); + +test('parsePersonaSpec keeps boolean shorthand sandbox / memory through round-trip', () => { + const spec = parsePersonaSpec( + validSpec({ cloud: true, sandbox: true, memory: false }), + 'documentation' + ); + assert.equal(spec.sandbox, true); + assert.equal(spec.memory, false); +}); diff --git a/packages/persona-kit/src/parse.ts b/packages/persona-kit/src/parse.ts index c894ec2..99cda84 100644 --- a/packages/persona-kit/src/parse.ts +++ b/packages/persona-kit/src/parse.ts @@ -16,15 +16,24 @@ import type { McpServerSpec, PermissionMode, PersonaInputSpec, + PersonaIntegrationConfig, + PersonaIntegrationTrigger, PersonaIntent, + PersonaMemory, + PersonaMemoryConfig, + PersonaMemoryScope, PersonaMount, PersonaPermissions, PersonaRuntime, + PersonaSandbox, + PersonaSandboxConfig, + PersonaSchedule, PersonaSelection, PersonaSkill, PersonaSpec, PersonaTag, PersonaTier, + PersonaTraits, SidecarMdMode } from './types.js'; @@ -450,6 +459,320 @@ export function parseMcpServers( return out; } +const MEMORY_SCOPE_VALUES: readonly PersonaMemoryScope[] = [ + 'session', + 'user', + 'workspace', + 'org', + 'object' +]; + +const TRAIT_LEVEL_VALUES = ['low', 'medium', 'high'] as const; +const TRAIT_RISK_VALUES = ['conservative', 'balanced', 'aggressive'] as const; + +const ONEVENT_EXT_RE = /\.(?:ts|tsx|mts|cts|js|mjs|cjs)$/i; + +// Standard 5-field cron: minute hour day-of-month month day-of-week. Each +// field is `*`, an integer, a range (`1-5`), a list (`1,3,5`), or a step +// like every-N (`*` slash `15`) or `5-25` slash `5`. Names like `MON` / `JAN` +// are deliberately not allowed at parse time; the runtime is the source of +// truth for what schedulers accept, and unknown shapes should propagate as +// runtime errors rather than silently passing through here. +const CRON_FIELD_RE = /^(?:\*|(?:\d+(?:-\d+)?)(?:,\d+(?:-\d+)?)*)(?:\/\d+)?$/; + +function assertOnEventPath(value: unknown, context: string): string { + if (typeof value !== 'string' || !value.trim()) { + throw new Error(`${context} must be a non-empty string`); + } + if (value.startsWith('/')) { + throw new Error(`${context} must be a relative POSIX path; got absolute "${value}"`); + } + const segments = value.split(/[\\/]+/); + if (segments.some((s) => s === '..')) { + throw new Error(`${context} must not contain ".." segments`); + } + if (!ONEVENT_EXT_RE.test(value)) { + throw new Error( + `${context} must point at a .ts/.tsx/.mts/.cts/.js/.mjs/.cjs file; got "${value}"` + ); + } + return value; +} + +function assertCronExpression(value: string, context: string): void { + const fields = value.trim().split(/\s+/); + if (fields.length !== 5) { + throw new Error(`${context} must be a 5-field cron expression; got ${fields.length} field(s)`); + } + for (let i = 0; i < fields.length; i += 1) { + const field = fields[i]; + if (!CRON_FIELD_RE.test(field)) { + throw new Error(`${context} field ${i + 1} is not a valid cron token: "${field}"`); + } + } +} + +export function parseIntegrationTrigger( + value: unknown, + context: string +): PersonaIntegrationTrigger { + if (!isObject(value)) { + throw new Error(`${context} must be an object`); + } + const { on, match, where } = value; + if (typeof on !== 'string' || !on.trim()) { + throw new Error(`${context}.on must be a non-empty string`); + } + if (match !== undefined && (typeof match !== 'string' || !match.trim())) { + throw new Error(`${context}.match must be a non-empty string if provided`); + } + if (where !== undefined && (typeof where !== 'string' || !where.trim())) { + throw new Error(`${context}.where must be a non-empty string if provided`); + } + return { + on, + ...(typeof match === 'string' ? { match } : {}), + ...(typeof where === 'string' ? { where } : {}) + }; +} + +export function parseIntegrationConfig( + value: unknown, + context: string +): PersonaIntegrationConfig { + if (!isObject(value)) { + throw new Error(`${context} must be an object`); + } + const { scope, triggers } = value; + + const out: PersonaIntegrationConfig = {}; + + if (scope !== undefined) { + const parsedScope = parseStringMap(scope, `${context}.scope`); + if (parsedScope && Object.keys(parsedScope).length > 0) { + out.scope = parsedScope; + } + } + + if (triggers !== undefined) { + if (!Array.isArray(triggers)) { + throw new Error(`${context}.triggers must be an array if provided`); + } + if (triggers.length === 0) { + throw new Error( + `${context}.triggers must contain at least one entry if provided (omit the field to declare an integration with no event triggers)` + ); + } + out.triggers = triggers.map((entry, idx) => + parseIntegrationTrigger(entry, `${context}.triggers[${idx}]`) + ); + } + + return out; +} + +export function parseIntegrations( + value: unknown, + context: string +): Record | undefined { + if (value === undefined) return undefined; + if (!isObject(value)) { + throw new Error(`${context} must be an object if provided`); + } + + const out: Record = {}; + for (const [provider, raw] of Object.entries(value)) { + if (!provider.trim()) { + throw new Error(`${context} integration keys must be non-empty strings`); + } + out[provider] = parseIntegrationConfig(raw, `${context}.${provider}`); + } + + return Object.keys(out).length > 0 ? out : undefined; +} + +export function parseSchedules( + value: unknown, + context: string +): PersonaSchedule[] | undefined { + if (value === undefined) return undefined; + if (!Array.isArray(value)) { + throw new Error(`${context} must be an array if provided`); + } + if (value.length === 0) return undefined; + + const seenNames = new Set(); + const out: PersonaSchedule[] = []; + for (const [idx, entry] of value.entries()) { + const entryContext = `${context}[${idx}]`; + if (!isObject(entry)) { + throw new Error(`${entryContext} must be an object`); + } + const { name, cron, tz } = entry; + if (typeof name !== 'string' || !name.trim()) { + throw new Error(`${entryContext}.name must be a non-empty string`); + } + if (seenNames.has(name)) { + throw new Error(`${entryContext}.name "${name}" duplicates an earlier schedule`); + } + seenNames.add(name); + if (typeof cron !== 'string' || !cron.trim()) { + throw new Error(`${entryContext}.cron must be a non-empty string`); + } + assertCronExpression(cron, `${entryContext}.cron`); + if (tz !== undefined && (typeof tz !== 'string' || !tz.trim())) { + throw new Error(`${entryContext}.tz must be a non-empty string if provided`); + } + out.push({ + name, + cron, + ...(typeof tz === 'string' ? { tz } : {}) + }); + } + return out; +} + +export function parseSandbox(value: unknown, context: string): PersonaSandbox | undefined { + if (value === undefined) return undefined; + if (typeof value === 'boolean') return value; + if (!isObject(value)) { + throw new Error(`${context} must be a boolean or an object if provided`); + } + const { enabled, timeoutSeconds, env } = value; + const out: PersonaSandboxConfig = {}; + if (enabled !== undefined) { + if (typeof enabled !== 'boolean') { + throw new Error(`${context}.enabled must be a boolean if provided`); + } + out.enabled = enabled; + } + if (timeoutSeconds !== undefined) { + if ( + typeof timeoutSeconds !== 'number' || + !Number.isFinite(timeoutSeconds) || + timeoutSeconds <= 0 + ) { + throw new Error(`${context}.timeoutSeconds must be a positive number if provided`); + } + out.timeoutSeconds = timeoutSeconds; + } + if (env !== undefined) { + const parsedEnv = parseStringMap(env, `${context}.env`); + if (parsedEnv && Object.keys(parsedEnv).length > 0) { + out.env = parsedEnv; + } + } + return out; +} + +export function parseMemory(value: unknown, context: string): PersonaMemory | undefined { + if (value === undefined) return undefined; + if (typeof value === 'boolean') return value; + if (!isObject(value)) { + throw new Error(`${context} must be a boolean or an object if provided`); + } + const { enabled, scopes, ttlDays, autoPromote, dedupMs } = value; + const out: PersonaMemoryConfig = {}; + if (enabled !== undefined) { + if (typeof enabled !== 'boolean') { + throw new Error(`${context}.enabled must be a boolean if provided`); + } + out.enabled = enabled; + } + if (scopes !== undefined) { + if (!Array.isArray(scopes) || scopes.length === 0) { + throw new Error( + `${context}.scopes must be a non-empty array of memory scopes if provided` + ); + } + const parsedScopes: PersonaMemoryScope[] = []; + for (const [idx, entry] of scopes.entries()) { + if (typeof entry !== 'string' || !MEMORY_SCOPE_VALUES.includes(entry as PersonaMemoryScope)) { + throw new Error( + `${context}.scopes[${idx}] must be one of: ${MEMORY_SCOPE_VALUES.join(', ')}` + ); + } + const scope = entry as PersonaMemoryScope; + if (!parsedScopes.includes(scope)) parsedScopes.push(scope); + } + out.scopes = parsedScopes; + } + if (ttlDays !== undefined) { + if (typeof ttlDays !== 'number' || !Number.isFinite(ttlDays) || ttlDays <= 0) { + throw new Error(`${context}.ttlDays must be a positive number if provided`); + } + out.ttlDays = ttlDays; + } + if (autoPromote !== undefined) { + if (typeof autoPromote !== 'boolean') { + throw new Error(`${context}.autoPromote must be a boolean if provided`); + } + out.autoPromote = autoPromote; + } + if (dedupMs !== undefined) { + if (typeof dedupMs !== 'number' || !Number.isFinite(dedupMs) || dedupMs < 0) { + throw new Error(`${context}.dedupMs must be a non-negative number if provided`); + } + out.dedupMs = dedupMs; + } + return out; +} + +export function parseTraits(value: unknown, context: string): PersonaTraits | undefined { + if (value === undefined) return undefined; + if (!isObject(value)) { + throw new Error(`${context} must be an object if provided`); + } + const { voice, formality, proactivity, riskPosture, domain, vocabulary, preferMarkdown } = value; + const out: PersonaTraits = {}; + if (voice !== undefined) { + if (typeof voice !== 'string' || !voice.trim()) { + throw new Error(`${context}.voice must be a non-empty string if provided`); + } + out.voice = voice; + } + if (formality !== undefined) { + if (typeof formality !== 'string' || !TRAIT_LEVEL_VALUES.includes(formality as 'low')) { + throw new Error(`${context}.formality must be one of: ${TRAIT_LEVEL_VALUES.join(', ')}`); + } + out.formality = formality as PersonaTraits['formality']; + } + if (proactivity !== undefined) { + if (typeof proactivity !== 'string' || !TRAIT_LEVEL_VALUES.includes(proactivity as 'low')) { + throw new Error(`${context}.proactivity must be one of: ${TRAIT_LEVEL_VALUES.join(', ')}`); + } + out.proactivity = proactivity as PersonaTraits['proactivity']; + } + if (riskPosture !== undefined) { + if (typeof riskPosture !== 'string' || !TRAIT_RISK_VALUES.includes(riskPosture as 'balanced')) { + throw new Error(`${context}.riskPosture must be one of: ${TRAIT_RISK_VALUES.join(', ')}`); + } + out.riskPosture = riskPosture as PersonaTraits['riskPosture']; + } + if (domain !== undefined) { + if (typeof domain !== 'string' || !domain.trim()) { + throw new Error(`${context}.domain must be a non-empty string if provided`); + } + out.domain = domain; + } + if (vocabulary !== undefined) { + const parsed = parseStringList(vocabulary, `${context}.vocabulary`); + if (parsed) out.vocabulary = parsed; + } + if (preferMarkdown !== undefined) { + if (typeof preferMarkdown !== 'boolean') { + throw new Error(`${context}.preferMarkdown must be a boolean if provided`); + } + out.preferMarkdown = preferMarkdown; + } + return Object.keys(out).length > 0 ? out : undefined; +} + +export function parseOnEvent(value: unknown, context: string): string | undefined { + if (value === undefined) return undefined; + return assertOnEventPath(value, context); +} + export function parsePersonaSpec(value: unknown, expectedIntent: PersonaIntent): PersonaSpec { if (!isObject(value)) { throw new Error(`persona[${expectedIntent}] must be an object`); @@ -473,7 +796,15 @@ export function parsePersonaSpec(value: unknown, expectedIntent: PersonaIntent): agentsMd, agentsMdMode, claudeMdContent, - agentsMdContent + agentsMdContent, + cloud, + useSubscription, + integrations, + schedules, + sandbox, + memory, + traits, + onEvent } = value; if (typeof id !== 'string' || !id.trim()) { @@ -545,6 +876,22 @@ export function parsePersonaSpec(value: unknown, expectedIntent: PersonaIntent): throw new Error(`persona[${expectedIntent}].agentsMdContent must be a non-empty string`); } + if (cloud !== undefined && typeof cloud !== 'boolean') { + throw new Error(`persona[${expectedIntent}].cloud must be a boolean if provided`); + } + if (useSubscription !== undefined && typeof useSubscription !== 'boolean') { + throw new Error(`persona[${expectedIntent}].useSubscription must be a boolean if provided`); + } + const parsedIntegrations = parseIntegrations( + integrations, + `persona[${expectedIntent}].integrations` + ); + const parsedSchedules = parseSchedules(schedules, `persona[${expectedIntent}].schedules`); + const parsedSandbox = parseSandbox(sandbox, `persona[${expectedIntent}].sandbox`); + const parsedMemory = parseMemory(memory, `persona[${expectedIntent}].memory`); + const parsedTraits = parseTraits(traits, `persona[${expectedIntent}].traits`); + const parsedOnEvent = parseOnEvent(onEvent, `persona[${expectedIntent}].onEvent`); + return { id, intent, @@ -563,7 +910,15 @@ export function parsePersonaSpec(value: unknown, expectedIntent: PersonaIntent): ...(typeof agentsMd === 'string' ? { agentsMd } : {}), ...(agentsMdMode ? { agentsMdMode: agentsMdMode as SidecarMdMode } : {}), ...(typeof claudeMdContent === 'string' ? { claudeMdContent } : {}), - ...(typeof agentsMdContent === 'string' ? { agentsMdContent } : {}) + ...(typeof agentsMdContent === 'string' ? { agentsMdContent } : {}), + ...(typeof cloud === 'boolean' ? { cloud } : {}), + ...(typeof useSubscription === 'boolean' ? { useSubscription } : {}), + ...(parsedIntegrations ? { integrations: parsedIntegrations } : {}), + ...(parsedSchedules ? { schedules: parsedSchedules } : {}), + ...(parsedSandbox !== undefined ? { sandbox: parsedSandbox } : {}), + ...(parsedMemory !== undefined ? { memory: parsedMemory } : {}), + ...(parsedTraits ? { traits: parsedTraits } : {}), + ...(parsedOnEvent !== undefined ? { onEvent: parsedOnEvent } : {}) }; } diff --git a/packages/persona-kit/src/triggers.test.ts b/packages/persona-kit/src/triggers.test.ts new file mode 100644 index 0000000..079b472 --- /dev/null +++ b/packages/persona-kit/src/triggers.test.ts @@ -0,0 +1,93 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { KNOWN_TRIGGERS, lintTriggers } from './triggers.js'; +import type { PersonaSpec } from './types.js'; + +const baseRuntime = { + harness: 'claude' as const, + model: 'anthropic/claude-3-5-sonnet', + systemPrompt: 'be helpful', + harnessSettings: { reasoning: 'medium' as const, timeoutSeconds: 300 } +}; + +function specWithIntegrations( + integrations: PersonaSpec['integrations'] +): PersonaSpec { + return { + id: 'p', + intent: 'documentation', + tags: ['documentation'], + description: 'd', + skills: [], + tiers: { + best: baseRuntime, + 'best-value': baseRuntime, + minimum: baseRuntime + }, + ...(integrations ? { integrations } : {}) + }; +} + +test('KNOWN_TRIGGERS ships a non-empty list per shipped provider', () => { + for (const [provider, names] of Object.entries(KNOWN_TRIGGERS)) { + assert.ok( + names.length > 0, + `provider ${provider} must declare at least one known trigger` + ); + for (const name of names) { + assert.ok(name.includes('.') || name.includes('_'), `trigger "${name}" should look like an event name`); + } + } +}); + +test('lintTriggers returns no issues for a persona with no integrations', () => { + assert.deepEqual(lintTriggers(specWithIntegrations(undefined)), []); +}); + +test('lintTriggers returns no issues for known providers and known triggers', () => { + const issues = lintTriggers( + specWithIntegrations({ + github: { + triggers: [ + { on: 'pull_request.opened' }, + { on: 'issue_comment.created' } + ] + }, + linear: { triggers: [{ on: 'issue.created' }] } + }) + ); + assert.deepEqual(issues, []); +}); + +test('lintTriggers warns once per unknown provider', () => { + const issues = lintTriggers( + specWithIntegrations({ + mysteryapp: { triggers: [{ on: 'thing.opened' }, { on: 'thing.closed' }] } + }) + ); + assert.equal(issues.length, 1); + assert.equal(issues[0].level, 'warning'); + assert.equal(issues[0].provider, 'mysteryapp'); + assert.equal(issues[0].path, 'integrations.mysteryapp'); +}); + +test('lintTriggers warns per unknown trigger for a known provider', () => { + const issues = lintTriggers( + specWithIntegrations({ + github: { + triggers: [ + { on: 'pull_request.opened' }, + { on: 'pull_request.really_truly_new_event' }, + { on: 'made.up' } + ] + } + }) + ); + const triggers = issues.map((i) => i.trigger).sort(); + assert.deepEqual(triggers, ['made.up', 'pull_request.really_truly_new_event']); + for (const issue of issues) { + assert.equal(issue.level, 'warning'); + assert.equal(issue.provider, 'github'); + assert.match(issue.path, /integrations\.github\.triggers\[\d+\]\.on/); + } +}); diff --git a/packages/persona-kit/src/triggers.ts b/packages/persona-kit/src/triggers.ts new file mode 100644 index 0000000..29b3283 --- /dev/null +++ b/packages/persona-kit/src/triggers.ts @@ -0,0 +1,95 @@ +import type { PersonaSpec } from './types.js'; + +/** + * Known event names per Relayfile provider, used by the deploy CLI to lint + * a persona's declared triggers before deploy. The cloud runtime is the + * authoritative source of truth — unknown names here produce a warning, + * not a failure, so adding a new event upstream doesn't gate workforce + * releases. + * + * This file ships intentionally sparse in v1 (see + * `docs/plans/deploy-v1-codex-spec.md` Task 6). Codex fills it out from the + * Relayfile adapter sources at `/Users/khaliqgant/Projects/AgentWorkforce/ + * relayfile-adapters/` plus the per-provider docs. + */ +export const KNOWN_TRIGGERS = { + github: [ + 'pull_request.opened', + 'pull_request.synchronize', + 'pull_request.closed', + 'pull_request_review_comment.created', + 'issue_comment.created', + 'issues.opened', + 'check_run.completed', + 'workflow_run.completed' + ], + linear: ['issue.created', 'issue.updated', 'comment.created'], + slack: ['app_mention', 'message.channels'], + notion: ['page.updated', 'page.created'], + jira: ['issue.created', 'issue.updated', 'comment.created'] +} as const satisfies Record; + +export type KnownProviderName = keyof typeof KNOWN_TRIGGERS; +export type KnownTriggerName

= (typeof KNOWN_TRIGGERS)[P][number]; + +export type TriggerLintLevel = 'warning'; + +export interface TriggerLintIssue { + level: TriggerLintLevel; + /** Provider slug the issue was raised under (`github`, `linear`, …). */ + provider: string; + /** The trigger name that was flagged. */ + trigger: string; + /** Field-pointed location, e.g. `integrations.github.triggers[2].on`. */ + path: string; + message: string; +} + +/** + * Walk a persona's integration triggers and flag any that don't appear in + * {@link KNOWN_TRIGGERS}. Always returns; never throws. Empty array when + * the persona has no integrations or every trigger is recognized. + * + * The deploy CLI surfaces these as yellow warnings before deploy and + * continues regardless. The runtime applies the trigger regardless of + * what this registry knows. + */ +export function lintTriggers(persona: PersonaSpec): TriggerLintIssue[] { + const issues: TriggerLintIssue[] = []; + const integrations = persona.integrations; + if (!integrations) return issues; + + for (const [provider, config] of Object.entries(integrations)) { + const triggers = config.triggers; + if (!triggers) continue; + const known = (KNOWN_TRIGGERS as Record)[provider]; + + if (!known) { + // Unknown provider: warn once on the integration as a whole so we + // don't spam per-trigger warnings for a provider workforce hasn't + // catalogued yet. + issues.push({ + level: 'warning', + provider, + trigger: '*', + path: `integrations.${provider}`, + message: `provider "${provider}" is not in the known-trigger registry; trigger names will not be linted` + }); + continue; + } + + for (const [idx, trigger] of triggers.entries()) { + if (!known.includes(trigger.on)) { + issues.push({ + level: 'warning', + provider, + trigger: trigger.on, + path: `integrations.${provider}.triggers[${idx}].on`, + message: `trigger "${trigger.on}" is not in the known-trigger registry for ${provider} (known: ${known.join(', ')})` + }); + } + } + } + + return issues; +} diff --git a/packages/persona-kit/src/types.ts b/packages/persona-kit/src/types.ts index 5683ae1..cd3a774 100644 --- a/packages/persona-kit/src/types.ts +++ b/packages/persona-kit/src/types.ts @@ -165,6 +165,106 @@ export type McpServerSpec = env?: Record; }; +/** + * A single event trigger declared by an integration. `on` is a Relayfile- + * adapter-normalized event name (e.g. `pull_request.opened`, + * `issue.created`, `app_mention`). `match` and `where` are filter sugars + * the deploy CLI lints against a known registry; unknown values warn but + * do not fail parse, so the cloud runtime stays the source of truth. + * + * Examples: + * { on: "pull_request.opened" } + * { on: "issue_comment.created", match: "@mention" } + * { on: "check_run.completed", where: "conclusion=failure" } + */ +export interface PersonaIntegrationTrigger { + on: string; + match?: string; + where?: string; +} + +/** + * Per-provider integration configuration. The map key is the Relayfile + * provider slug (`github`, `linear`, `slack`, `notion`, `jira`). `scope` + * is provider-specific filter metadata (e.g. `{ repo: "org/repo" }` for + * github, `{ database: "" }` for notion). `triggers` are flat — all + * trigger events for this provider fan into the same `onEvent` handler, + * which discriminates on `event.source` + `event.type`. + */ +export interface PersonaIntegrationConfig { + scope?: Record; + triggers?: PersonaIntegrationTrigger[]; +} + +/** + * A cron-style schedule. `name` is unique within the persona and surfaces + * to the handler as `event.name`. `cron` is a standard 5-field expression. + * `tz` defaults to `UTC` at the runtime layer (the parser keeps it + * optional so the spec stays close to what the author wrote). + */ +export interface PersonaSchedule { + name: string; + cron: string; + tz?: string; +} + +/** + * Long-form sandbox configuration. `enabled` defaults to true when the + * object form is present; supply the boolean shorthand `sandbox: false` + * to opt out entirely. `timeoutSeconds` caps a single handler invocation + * (default 1800s in the runtime). `env` is merged on top of auto-injected + * secrets at sandbox-create time. + * + * Image selection is intentionally not user-configurable in v1 — workforce + * picks a standard image. Add `image` later if a real demand surfaces. + */ +export interface PersonaSandboxConfig { + enabled?: boolean; + timeoutSeconds?: number; + env?: Record; +} + +/** + * Sandbox can be specified as `true` / `false` shorthand or as the full + * config object. The parser preserves whichever form the author wrote so + * round-trips stay lossless; consumers normalize when reading. + */ +export type PersonaSandbox = boolean | PersonaSandboxConfig; + +/** Memory scope semantics, mirroring @agent-assistant/memory. */ +export type PersonaMemoryScope = 'session' | 'user' | 'workspace' | 'org' | 'object'; + +/** + * Long-form memory configuration. Defaults are applied by the runtime, + * not the parser — the spec keeps only what the author actually wrote. + * `enabled` defaults to true when the object form is present. + */ +export interface PersonaMemoryConfig { + enabled?: boolean; + scopes?: PersonaMemoryScope[]; + ttlDays?: number; + autoPromote?: boolean; + dedupMs?: number; +} + +export type PersonaMemory = boolean | PersonaMemoryConfig; + +/** + * Conversational traits, applied only when the agent posts to a chat + * surface (Slack, Relaycast, GitHub PR comment). Headless agents — the + * paraglide "Linear issue → PR" pattern — should omit this field. Mirrors + * the trait shape in `@agent-assistant/traits`. + */ +export interface PersonaTraits { + voice?: string; + formality?: 'low' | 'medium' | 'high'; + proactivity?: 'low' | 'medium' | 'high'; + riskPosture?: 'conservative' | 'balanced' | 'aggressive'; + domain?: string; + vocabulary?: string[]; + preferMarkdown?: boolean; +} + export interface PersonaSpec { id: string; intent: string; @@ -237,6 +337,57 @@ export interface PersonaSpec { claudeMdContent?: string; /** Inlined `AGENTS.md` content for built-in personas. */ agentsMdContent?: string; + /** + * Opt this persona into the `workforce deploy` cloud-agent surface. + * When `true`, the deploy CLI considers this persona a deployable agent + * (validates {@link integrations} / {@link schedules}, prompts for + * integration connect, bundles {@link onEvent}, hands off to the runtime). + * Local `workforce agent ` flows ignore this flag — non-deploy use + * keeps working unchanged. + */ + cloud?: boolean; + /** + * When `true`, inference for this agent uses the user's connected LLM + * subscription via `@agent-relay/cloud`'s provider link, rather than + * workforce-billed tokens. The deploy CLI calls `connectProvider({...})` + * at deploy time. Only meaningful when {@link cloud} is `true`. + */ + useSubscription?: boolean; + /** + * Per-provider integration declarations keyed by Relayfile provider slug + * (`github`, `linear`, `slack`, `notion`, `jira`). At deploy time the CLI + * runs `RelayfileSetup.connectIntegration({ allowedIntegrations: [key] })` + * for each provider not yet connected to the active workspace. + */ + integrations?: Record; + /** Cron-style schedules. Each `name` is unique within the persona. */ + schedules?: PersonaSchedule[]; + /** + * Sandbox preference. `true` (default for cloud personas) means the + * agent runs inside a Daytona sandbox at deploy time; `false` runs it in + * the runner process. The object form lets the author tune timeout / env. + */ + sandbox?: PersonaSandbox; + /** + * Memory subsystem opt-in. Wires the agent-assistant memory adapter at + * runtime; the persona spec only declares intent, not implementation + * details (api keys, adapter type, etc. come from workforce env). + */ + memory?: PersonaMemory; + /** + * Conversational traits, applied only when the agent posts to a chat + * surface. Omit for headless agents. + */ + traits?: PersonaTraits; + /** + * Relative POSIX path to the TypeScript (or compiled .js / .mjs) file + * whose default export is the deploy-time event handler. Resolved + * relative to the persona JSON's directory at deploy time. Required when + * {@link cloud} is `true` and any trigger is declared; the deploy CLI + * enforces this at deploy time, the parser keeps it optional so partially- + * authored specs still parse. + */ + onEvent?: string; } export interface PersonaSelection { diff --git a/packages/runtime/package.json b/packages/runtime/package.json new file mode 100644 index 0000000..cd11993 --- /dev/null +++ b/packages/runtime/package.json @@ -0,0 +1,50 @@ +{ + "name": "@agentworkforce/runtime", + "version": "0.0.0", + "private": false, + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "./runner": { + "types": "./dist/runner.d.ts", + "default": "./dist/runner.js" + }, + "./clients": { + "types": "./dist/clients/index.d.ts", + "default": "./dist/clients/index.js" + }, + "./raw": { + "types": "./dist/raw.d.ts", + "default": "./dist/raw.js" + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "README.md", + "package.json" + ], + "repository": { + "type": "git", + "url": "https://github.com/AgentWorkforce/workforce", + "directory": "packages/runtime" + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsc -p tsconfig.json", + "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput", + "typecheck": "tsc -p tsconfig.json --noEmit", + "test": "tsc -p tsconfig.json && node --test dist/**/*.test.js dist/*.test.js", + "lint": "tsc -p tsconfig.json --noEmit" + }, + "dependencies": { + "@agentworkforce/persona-kit": "workspace:*" + } +} diff --git a/packages/runtime/src/clients/errors.ts b/packages/runtime/src/clients/errors.ts new file mode 100644 index 0000000..8e4fbe7 --- /dev/null +++ b/packages/runtime/src/clients/errors.ts @@ -0,0 +1,35 @@ +/** + * Error thrown by every integration client when a remote call fails. The + * runtime's retry loop reads `retryable` to decide whether to redeliver + * the event; tests + handlers can branch on `provider` + `operation` for + * targeted recovery without parsing message strings. + */ +export class WorkforceIntegrationError extends Error { + readonly provider: string; + readonly operation: string; + readonly status?: number; + readonly retryable: boolean; + override readonly cause?: unknown; + + constructor(opts: { + provider: string; + operation: string; + message: string; + status?: number; + retryable?: boolean; + cause?: unknown; + }) { + super(`${opts.provider}.${opts.operation}: ${opts.message}`); + this.name = 'WorkforceIntegrationError'; + this.provider = opts.provider; + this.operation = opts.operation; + this.retryable = opts.retryable ?? false; + if (opts.status !== undefined) this.status = opts.status; + if (opts.cause !== undefined) this.cause = opts.cause; + } +} + +/** 5xx and 429 responses are retryable; 4xx (other than 429) are not. */ +export function isRetryableStatus(status: number): boolean { + return status >= 500 || status === 429; +} diff --git a/packages/runtime/src/clients/github.test.ts b/packages/runtime/src/clients/github.test.ts new file mode 100644 index 0000000..dd7d5db --- /dev/null +++ b/packages/runtime/src/clients/github.test.ts @@ -0,0 +1,143 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { createGithubClient } from './github.js'; +import { WorkforceIntegrationError } from './errors.js'; + +interface RecordedCall { + url: string; + method: string; + headers: Record; + body?: unknown; +} + +function fakeFetch( + handlers: Array<(call: RecordedCall) => Response | Promise> +): { fetch: typeof fetch; calls: RecordedCall[] } { + const calls: RecordedCall[] = []; + let i = 0; + const fakeImpl = (async (input: RequestInfo | URL, init?: RequestInit) => { + const url = typeof input === 'string' ? input : input.toString(); + const headers: Record = {}; + if (init?.headers) { + const entries = + init.headers instanceof Headers + ? Array.from(init.headers.entries()) + : Array.isArray(init.headers) + ? init.headers + : Object.entries(init.headers); + for (const [k, v] of entries) headers[k.toLowerCase()] = String(v); + } + const body = init?.body ? JSON.parse(init.body.toString()) : undefined; + const call: RecordedCall = { + url, + method: init?.method ?? 'GET', + headers, + ...(body !== undefined ? { body } : {}) + }; + calls.push(call); + const handler = handlers[i]; + if (!handler) throw new Error(`fakeFetch: no handler at call index ${i}`); + i += 1; + return handler(call); + }) as typeof fetch; + return { fetch: fakeImpl, calls }; +} + +test('createGithubClient.comment POSTs the issue comment endpoint with the right headers', async () => { + const { fetch: fakeImpl, calls } = fakeFetch([ + () => + new Response(JSON.stringify({ id: 1, html_url: 'https://github.com/o/r/issues/2#issuecomment-1' }), { + status: 201 + }) + ]); + const client = createGithubClient({ token: 'pat_abc', fetchImpl: fakeImpl }); + const ref = await client.comment({ owner: 'o', repo: 'r', number: 2 }, 'hello'); + + assert.equal(ref.number, 2); + assert.equal(ref.url, 'https://github.com/o/r/issues/2#issuecomment-1'); + assert.equal(calls[0].url, 'https://api.github.com/repos/o/r/issues/2/comments'); + assert.equal(calls[0].method, 'POST'); + assert.equal(calls[0].headers.authorization, 'Bearer pat_abc'); + assert.equal(calls[0].headers['x-github-api-version'], '2022-11-28'); + assert.deepEqual(calls[0].body, { body: 'hello' }); +}); + +test('createGithubClient.upsertIssue creates when no open match is found', async () => { + const { fetch: fakeImpl, calls } = fakeFetch([ + () => new Response(JSON.stringify({ items: [] }), { status: 200 }), + () => new Response(JSON.stringify({ number: 99, html_url: 'https://github.com/o/r/issues/99' }), { status: 201 }) + ]); + const client = createGithubClient({ token: 't', fetchImpl: fakeImpl }); + const result = await client.upsertIssue({ + owner: 'o', + repo: 'r', + title: 'fresh', + body: 'body', + matchTitle: 'fresh', + labels: ['digest'] + }); + assert.equal(result.created, true); + assert.equal(result.number, 99); + assert.equal(calls[0].method, 'GET'); + assert.match(calls[0].url, /\/search\/issues\?q=/); + assert.equal(calls[1].method, 'POST'); + assert.deepEqual(calls[1].body, { title: 'fresh', body: 'body', labels: ['digest'] }); +}); + +test('createGithubClient.upsertIssue PATCHes when an open match exists', async () => { + const { fetch: fakeImpl, calls } = fakeFetch([ + () => + new Response( + JSON.stringify({ + items: [{ number: 7, title: 'weekly-digest', state: 'open', html_url: 'https://github.com/o/r/issues/7' }] + }), + { status: 200 } + ), + () => new Response(null, { status: 204 }) + ]); + const client = createGithubClient({ token: 't', fetchImpl: fakeImpl }); + const result = await client.upsertIssue({ + owner: 'o', + repo: 'r', + title: 'weekly-digest', + body: 'refreshed', + matchTitle: 'weekly-digest' + }); + assert.equal(result.created, false); + assert.equal(result.number, 7); + assert.equal(calls[1].method, 'PATCH'); + assert.deepEqual(calls[1].body, { body: 'refreshed' }); +}); + +test('createGithubClient surfaces non-2xx with WorkforceIntegrationError', async () => { + const { fetch: fakeImpl } = fakeFetch([ + () => new Response('rate limited', { status: 429, statusText: 'Too Many Requests' }) + ]); + const client = createGithubClient({ token: 't', fetchImpl: fakeImpl }); + await assert.rejects( + () => client.comment({ owner: 'o', repo: 'r', number: 1 }, 'x'), + (err: unknown) => { + assert.ok(err instanceof WorkforceIntegrationError); + assert.equal(err.provider, 'github'); + assert.equal(err.operation, 'comment'); + assert.equal(err.status, 429); + assert.equal(err.retryable, true); + return true; + } + ); +}); + +test('createGithubClient surfaces 4xx as non-retryable', async () => { + const { fetch: fakeImpl } = fakeFetch([ + () => new Response('not found', { status: 404, statusText: 'Not Found' }) + ]); + const client = createGithubClient({ token: 't', fetchImpl: fakeImpl }); + await assert.rejects( + () => client.comment({ owner: 'o', repo: 'r', number: 1 }, 'x'), + (err: unknown) => { + assert.ok(err instanceof WorkforceIntegrationError); + assert.equal(err.retryable, false); + return true; + } + ); +}); diff --git a/packages/runtime/src/clients/github.ts b/packages/runtime/src/clients/github.ts new file mode 100644 index 0000000..e5d4748 --- /dev/null +++ b/packages/runtime/src/clients/github.ts @@ -0,0 +1,232 @@ +import { isRetryableStatus, WorkforceIntegrationError } from './errors.js'; + +export interface GithubIssueTarget { + owner: string; + repo: string; + number: number; +} + +export interface GithubRepoCoords { + owner: string; + repo: string; +} + +export interface GithubIssueRef { + number: number; + url: string; +} + +export interface GithubUpsertResult extends GithubIssueRef { + created: boolean; +} + +export interface GithubReviewComment { + path: string; + line: number; + body: string; +} + +export interface GithubReview { + body: string; + event: 'COMMENT' | 'APPROVE' | 'REQUEST_CHANGES'; + comments?: GithubReviewComment[]; +} + +export interface GithubPr { + title: string; + body: string; + diff: string; + head: string; + base: string; + author: string; +} + +/** + * Minimal GitHub client used by personas. Today it covers the operations + * weekly-digest and review-agent need; we grow it by need rather than + * mirroring the full REST surface. + */ +export interface GithubClient { + comment(target: GithubIssueTarget, body: string): Promise; + createIssue(args: GithubRepoCoords & { title: string; body: string; labels?: string[] }): Promise; + upsertIssue(args: GithubRepoCoords & { title: string; body: string; labels?: string[]; matchTitle: string }): Promise; + getPr(target: GithubIssueTarget): Promise; + postReview(target: GithubIssueTarget, review: GithubReview): Promise; +} + +export interface GithubClientOptions { + /** Bearer token. Either a PAT or a Relayfile-issued scoped token. */ + token: string; + /** Override for GitHub Enterprise. Defaults to api.github.com. */ + apiUrl?: string; + /** Optional fetch override (tests + custom transports). */ + fetchImpl?: typeof fetch; +} + +/** + * Construct a real GitHub client. The token is sent on every request as + * a Bearer credential, matching both PAT and GitHub App installation + * token conventions. + */ +export function createGithubClient(opts: GithubClientOptions): GithubClient { + const apiUrl = (opts.apiUrl ?? 'https://api.github.com').replace(/\/$/, ''); + const fetchImpl = opts.fetchImpl ?? fetch; + + async function request(operation: string, init: { method: string; pathname: string; body?: unknown }): Promise { + const url = `${apiUrl}${init.pathname}`; + let response: Response; + try { + response = await fetchImpl(url, { + method: init.method, + headers: { + accept: 'application/vnd.github+json', + authorization: `Bearer ${opts.token}`, + 'x-github-api-version': '2022-11-28', + ...(init.body !== undefined ? { 'content-type': 'application/json' } : {}), + 'user-agent': 'workforce-runtime' + }, + ...(init.body !== undefined ? { body: JSON.stringify(init.body) } : {}) + }); + } catch (err) { + throw new WorkforceIntegrationError({ + provider: 'github', + operation, + message: `network error: ${err instanceof Error ? err.message : String(err)}`, + retryable: true, + cause: err + }); + } + + if (!response.ok) { + const bodyText = await response.text().catch(() => ''); + throw new WorkforceIntegrationError({ + provider: 'github', + operation, + message: `${response.status} ${response.statusText}${bodyText ? ` — ${truncate(bodyText, 400)}` : ''}`, + status: response.status, + retryable: isRetryableStatus(response.status) + }); + } + + if (response.status === 204) return undefined as T; + return (await response.json()) as T; + } + + return { + async comment(target, body) { + const out = await request<{ id: number; html_url: string }>('comment', { + method: 'POST', + pathname: `/repos/${target.owner}/${target.repo}/issues/${target.number}/comments`, + body: { body } + }); + return { number: target.number, url: out.html_url }; + }, + async createIssue(args) { + const out = await request<{ number: number; html_url: string }>('createIssue', { + method: 'POST', + pathname: `/repos/${args.owner}/${args.repo}/issues`, + body: { + title: args.title, + body: args.body, + ...(args.labels ? { labels: args.labels } : {}) + } + }); + return { number: out.number, url: out.html_url }; + }, + async upsertIssue(args) { + const search = await request<{ + items: Array<{ number: number; title: string; html_url: string; state: string }>; + }>('upsertIssue.search', { + method: 'GET', + pathname: `/search/issues?q=${encodeURIComponent( + `repo:${args.owner}/${args.repo} in:title is:issue "${args.matchTitle}"` + )}&per_page=10` + }); + const exact = search.items.find( + (item) => item.title === args.matchTitle && item.state === 'open' + ); + if (exact) { + await request('upsertIssue.edit', { + method: 'PATCH', + pathname: `/repos/${args.owner}/${args.repo}/issues/${exact.number}`, + body: { + body: args.body, + ...(args.labels ? { labels: args.labels } : {}) + } + }); + return { number: exact.number, url: exact.html_url, created: false }; + } + const created = await request<{ number: number; html_url: string }>('upsertIssue.create', { + method: 'POST', + pathname: `/repos/${args.owner}/${args.repo}/issues`, + body: { + title: args.matchTitle === args.title ? args.title : args.matchTitle, + body: args.body, + ...(args.labels ? { labels: args.labels } : {}) + } + }); + return { number: created.number, url: created.html_url, created: true }; + }, + async getPr(target) { + const pr = await request<{ + title: string; + body: string | null; + head: { ref: string }; + base: { ref: string }; + user: { login: string } | null; + diff_url: string; + }>('getPr.metadata', { + method: 'GET', + pathname: `/repos/${target.owner}/${target.repo}/pulls/${target.number}` + }); + const diff = await fetchDiff(fetchImpl, opts.token, pr.diff_url); + return { + title: pr.title, + body: pr.body ?? '', + diff, + head: pr.head.ref, + base: pr.base.ref, + author: pr.user?.login ?? '' + }; + }, + async postReview(target, review) { + await request('postReview', { + method: 'POST', + pathname: `/repos/${target.owner}/${target.repo}/pulls/${target.number}/reviews`, + body: { + body: review.body, + event: review.event, + ...(review.comments + ? { + comments: review.comments.map((c) => ({ path: c.path, line: c.line, body: c.body })) + } + : {}) + } + }); + } + }; +} + +async function fetchDiff(fetchImpl: typeof fetch, token: string, diffUrl: string): Promise { + const response = await fetchImpl(diffUrl, { + headers: { + accept: 'application/vnd.github.v3.diff', + authorization: `Bearer ${token}`, + 'user-agent': 'workforce-runtime' + } + }); + if (!response.ok) { + throw new WorkforceIntegrationError({ + provider: 'github', + operation: 'getPr.diff', + message: `${response.status} ${response.statusText}`, + status: response.status, + retryable: isRetryableStatus(response.status) + }); + } + return response.text(); +} + +function truncate(s: string, n: number): string { + return s.length <= n ? s : `${s.slice(0, n)}…`; +} diff --git a/packages/runtime/src/clients/index.ts b/packages/runtime/src/clients/index.ts new file mode 100644 index 0000000..2ebca07 --- /dev/null +++ b/packages/runtime/src/clients/index.ts @@ -0,0 +1,14 @@ +export { + createGithubClient, + type GithubClient, + type GithubClientOptions, + type GithubIssueRef, + type GithubIssueTarget, + type GithubPr, + type GithubRepoCoords, + type GithubReview, + type GithubReviewComment, + type GithubUpsertResult +} from './github.js'; + +export { WorkforceIntegrationError, isRetryableStatus } from './errors.js'; diff --git a/packages/runtime/src/ctx.ts b/packages/runtime/src/ctx.ts new file mode 100644 index 0000000..1453751 --- /dev/null +++ b/packages/runtime/src/ctx.ts @@ -0,0 +1,119 @@ +import type { PersonaSpec } from '@agentworkforce/persona-kit'; +import type { + LlmContext, + MemoryContext, + ScheduleContext, + SandboxContext, + WorkforceCtx, + WorkflowContext +} from './types.js'; + +/** + * Options passed to `buildCtx` when the runner cold-starts. The deploy + * package supplies these from the bundle metadata + environment. + * + * Required subsystems (`sandbox`, `harnessRunner`) must always be + * provided — there is no sensible default for spawning a harness or + * executing inside an isolated filesystem. Optional subsystems + * (`llm`, `memory`, `workflow`, `schedule`, `log`, `integrations`) + * fall back to documented defaults: `memory` becomes a no-op (so + * `ctx.memory.save(...)` is safe to call from any handler), the rest + * throw with a single-line "not configured" message that names the + * persona-side flag a caller would set to enable them. + */ +export interface CtxBuildOptions { + persona: PersonaSpec; + workspaceId: string; + agentName?: string; + sandbox: SandboxContext; + llm?: LlmContext; + memory?: MemoryContext; + workflow?: WorkflowContext; + schedule?: ScheduleContext; + integrations?: Record; + log?: WorkforceCtx['log']; + harnessRunner: WorkforceCtx['harness']['run']; +} + +const NOOP_MEMORY: MemoryContext = { + async save() { + /* memory disabled (persona.memory unset) — saves silently no-op */ + }, + async recall() { + return []; + } +}; + +const UNAVAILABLE_LLM: LlmContext = { + async complete() { + throw new Error( + 'ctx.llm is unavailable: set persona.useSubscription:true and connect a provider, or pass a workforce-billed LlmContext to buildCtx.' + ); + } +}; + +const UNAVAILABLE_WORKFLOW: WorkflowContext = { + async run() { + throw new Error( + 'ctx.workflow is unavailable: the runner is not connected to the workforce workflows API (workspace token missing).' + ); + }, + async status() { + throw new Error( + 'ctx.workflow is unavailable: the runner is not connected to the workforce workflows API (workspace token missing).' + ); + } +}; + +const UNAVAILABLE_SCHEDULE: ScheduleContext = { + async at() { + throw new Error( + 'ctx.schedule.at is unavailable: connect the runner to a scheduler (relaycron or workforce cloud) before scheduling follow-ups.' + ); + }, + async cancel() { + throw new Error( + 'ctx.schedule.cancel is unavailable: connect the runner to a scheduler (relaycron or workforce cloud) before canceling schedules.' + ); + } +}; + +function defaultLog(level: string, message: string, attrs?: Record): void { + const stream = level === 'error' || level === 'warn' ? process.stderr : process.stdout; + const line = JSON.stringify({ t: new Date().toISOString(), level, message, ...(attrs ?? {}) }); + stream.write(`${line}\n`); +} + +/** + * Compose a WorkforceCtx from the deploy-supplied subsystems. Subsystems + * left unset fall back to the documented defaults at the top of this + * file — handlers that depend on an unavailable subsystem fail with a + * clear runtime error rather than silently dropping work. + */ +export function buildCtx(options: CtxBuildOptions): WorkforceCtx { + const ctx: WorkforceCtx = { + persona: options.persona, + workspaceId: options.workspaceId, + agentName: options.agentName ?? options.persona.id, + llm: options.llm ?? UNAVAILABLE_LLM, + harness: { run: options.harnessRunner }, + sandbox: options.sandbox, + memory: options.memory ?? NOOP_MEMORY, + workflow: options.workflow ?? UNAVAILABLE_WORKFLOW, + schedule: options.schedule ?? UNAVAILABLE_SCHEDULE, + log: options.log ?? defaultLog + }; + + // Per-integration clients attach as named ctx fields. The deploy step + // decides the concrete shape of each client — `github` is a typed + // `GithubClient`, others are `unknown` until they ship. Handlers + // narrow with a runtime check (`if (ctx.linear)`) and cast against + // the future client interface. + if (options.integrations) { + for (const [provider, client] of Object.entries(options.integrations)) { + Object.assign(ctx, { [provider]: client }); + } + } + + return ctx; +} diff --git a/packages/runtime/src/handler.ts b/packages/runtime/src/handler.ts new file mode 100644 index 0000000..bee70e9 --- /dev/null +++ b/packages/runtime/src/handler.ts @@ -0,0 +1,43 @@ +import type { WorkforceHandler, WorkforceHandlerExport } from './types.js'; + +/** + * Brand a user-supplied event handler so the runtime can recognize it + * after dynamic import. Identity at runtime — `handler(fn) === fn` with + * an added non-enumerable marker. The wrapper exists so we can grow the + * handler-side API later (e.g. lifecycle hooks, declared capabilities) + * without breaking older bundles. + * + * Usage: + * ```ts + * import { handler } from '@agentworkforce/runtime'; + * + * export default handler(async (ctx, event) => { + * if (event.source === 'github' && event.type === 'pull_request.opened') { + * await ctx.github!.comment(...); + * } + * }); + * ``` + */ +export function handler(fn: WorkforceHandler): WorkforceHandlerExport { + if (typeof fn !== 'function') { + throw new TypeError('handler() expects a function'); + } + Object.defineProperty(fn, '__workforceHandler', { + value: true, + writable: false, + enumerable: false, + configurable: false + }); + return fn as WorkforceHandlerExport; +} + +/** + * Detect whether a value looks like a branded workforce handler. Used by + * the runner to validate the bundle's default export before invoking it. + */ +export function isWorkforceHandler(value: unknown): value is WorkforceHandlerExport { + return ( + typeof value === 'function' && + (value as { __workforceHandler?: unknown }).__workforceHandler === true + ); +} diff --git a/packages/runtime/src/index.ts b/packages/runtime/src/index.ts new file mode 100644 index 0000000..6321472 --- /dev/null +++ b/packages/runtime/src/index.ts @@ -0,0 +1,59 @@ +// Public DX surface for personas. Authors import `handler` to brand +// their event handler; everything else flows through ctx + event types. + +export { handler, isWorkforceHandler } from './handler.js'; + +export type { + HarnessRunArgs, + HarnessRunResult, + IntegrationClients, + LlmContext, + MemoryContext, + MemoryItem, + MemoryRecallOptions, + MemorySaveOptions, + SandboxContext, + SandboxExecArgs, + SandboxExecResult, + ScheduleContext, + WorkflowContext, + WorkflowRunHandle, + WorkforceCronEvent, + WorkforceCtx, + WorkforceEvent, + WorkforceEventSource, + WorkforceHandler, + WorkforceHandlerExport, + WorkforceProviderEvent +} from './types.js'; + +// Integration clients — concrete today: github. Others are typed `unknown` +// in `WorkforceCtx` until they ship; importing them from here keeps +// handler-side imports stable when typed clients land. +export { + createGithubClient, + WorkforceIntegrationError, + isRetryableStatus, + type GithubClient, + type GithubClientOptions, + type GithubIssueRef, + type GithubIssueTarget, + type GithubPr, + type GithubRepoCoords, + type GithubReview, + type GithubReviewComment, + type GithubUpsertResult +} from './clients/index.js'; + +// Re-export persona-kit types personas commonly reference at the handler +// surface, so users don't need a second import for the shapes the ctx +// carries. +export type { + PersonaIntegrationConfig, + PersonaIntegrationTrigger, + PersonaMemoryScope, + PersonaSchedule, + PersonaSpec, + PersonaTier, + PersonaTraits +} from '@agentworkforce/persona-kit'; diff --git a/packages/runtime/src/raw.ts b/packages/runtime/src/raw.ts new file mode 100644 index 0000000..665e5ac --- /dev/null +++ b/packages/runtime/src/raw.ts @@ -0,0 +1,18 @@ +/** + * Lower-level escape hatch for power users who want the cloud + * proactive-runtime SDK directly without workforce's `handler()` / + * `WorkforceCtx` ergonomics. When `@agent-relay/agent` is published, this + * file should switch to: + * + * export { agent } from '@agent-relay/agent'; + * + * Until that package is available on npm, this module exposes the same + * shapes as documented in `cloud-proactive-runtime-spec/docs/proactive- + * runtime/spec.md` so users can write code against the contract today + * and swap the import once the SDK ships. + */ + +export { shimEnvelope, type RawGatewayEnvelope } from './shim.js'; +export type { CtxBuildOptions } from './ctx.js'; +export { buildCtx } from './ctx.js'; +export { startRunner, type StartRunnerOptions } from './runner.js'; diff --git a/packages/runtime/src/runner.test.ts b/packages/runtime/src/runner.test.ts new file mode 100644 index 0000000..e382177 --- /dev/null +++ b/packages/runtime/src/runner.test.ts @@ -0,0 +1,138 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import type { PersonaSpec } from '@agentworkforce/persona-kit'; +import { startRunner } from './runner.js'; +import { handler } from './handler.js'; +import type { RawGatewayEnvelope } from './shim.js'; +import type { SandboxContext, WorkforceEvent } from './types.js'; + +const baseRuntime = { + harness: 'claude' as const, + model: 'anthropic/claude-3-5-sonnet', + systemPrompt: 'be helpful', + harnessSettings: { reasoning: 'medium' as const, timeoutSeconds: 300 } +}; + +const persona: PersonaSpec = { + id: 'demo', + intent: 'documentation', + tags: ['documentation'], + description: 'test persona', + skills: [], + tiers: { best: baseRuntime, 'best-value': baseRuntime, minimum: baseRuntime }, + cloud: true, + schedules: [{ name: 'weekly', cron: '0 9 * * 6' }] +}; + +const stubSandbox: SandboxContext = { + cwd: '/tmp', + async exec() { + return { output: '', exitCode: 0 }; + }, + async readFile() { + return ''; + }, + async writeFile() { + /* no-op */ + } +}; + +async function* streamOf(envelopes: RawGatewayEnvelope[]): AsyncGenerator { + for (const env of envelopes) yield env; +} + +test('startRunner dispatches a cron envelope to the handler', async () => { + const received: WorkforceEvent[] = []; + const logs: Array<{ level: string; message: string }> = []; + await startRunner({ + persona, + workspaceId: 'ws-test', + handler: handler(async (_ctx, event) => { + received.push(event); + }), + subsystems: { + sandbox: stubSandbox, + log: (level, message) => logs.push({ level, message }) + }, + envelopes: streamOf([ + { + id: 'e1', + workspace: 'ws-test', + type: 'cron.tick', + occurredAt: '2026-05-12T09:00:00Z', + name: 'weekly', + cron: '0 9 * * 6' + } + ]) + }); + assert.equal(received.length, 1); + assert.equal(received[0].source, 'cron'); + if (received[0].source !== 'cron') return; + assert.equal(received[0].name, 'weekly'); + assert.ok(logs.find((l) => l.message === 'runner.handler.ok')); +}); + +test('startRunner logs and continues when the handler throws', async () => { + const logs: Array<{ level: string; message: string }> = []; + let invocations = 0; + await startRunner({ + persona, + workspaceId: 'ws-test', + handler: handler(async () => { + invocations += 1; + throw new Error('boom'); + }), + subsystems: { + sandbox: stubSandbox, + log: (level, message) => logs.push({ level, message }) + }, + envelopes: streamOf([ + { id: 'e1', workspace: 'ws-test', type: 'cron.tick', occurredAt: 'x' }, + { id: 'e2', workspace: 'ws-test', type: 'cron.tick', occurredAt: 'x' } + ]) + }); + assert.equal(invocations, 2, 'handler is invoked again after the first failure'); + const errors = logs.filter((l) => l.message === 'runner.handler.error'); + assert.equal(errors.length, 2); +}); + +test('startRunner skips envelopes that the shim can not translate', async () => { + const received: WorkforceEvent[] = []; + const logs: Array<{ level: string; message: string }> = []; + await startRunner({ + persona, + workspaceId: 'ws-test', + handler: handler(async (_ctx, event) => { + received.push(event); + }), + subsystems: { + sandbox: stubSandbox, + log: (level, message) => logs.push({ level, message }) + }, + envelopes: streamOf([ + { id: 'e1', workspace: 'ws-test', type: 'mystery.thing', occurredAt: 'x' }, + { id: 'e2', workspace: 'ws-test', type: 'cron.tick', occurredAt: 'x', name: 'tick' } + ]) + }); + assert.equal(received.length, 1); + assert.ok(logs.find((l) => l.message === 'runner.envelope.unsupported')); +}); + +test('startRunner throws when workspaceId is missing from both options and env', async () => { + const previous = process.env.WORKFORCE_WORKSPACE_ID; + delete process.env.WORKFORCE_WORKSPACE_ID; + try { + await assert.rejects( + () => + startRunner({ + persona, + handler: handler(async () => {}), + subsystems: { sandbox: stubSandbox }, + envelopes: streamOf([]) + }), + /workspaceId is required/ + ); + } finally { + if (previous !== undefined) process.env.WORKFORCE_WORKSPACE_ID = previous; + } +}); diff --git a/packages/runtime/src/runner.ts b/packages/runtime/src/runner.ts new file mode 100644 index 0000000..ae57936 --- /dev/null +++ b/packages/runtime/src/runner.ts @@ -0,0 +1,209 @@ +import type { PersonaSpec } from '@agentworkforce/persona-kit'; +import { buildCtx, type CtxBuildOptions } from './ctx.js'; +import { isWorkforceHandler } from './handler.js'; +import { shimEnvelope, type RawGatewayEnvelope } from './shim.js'; +import type { + HarnessRunArgs, + HarnessRunResult, + SandboxContext, + WorkforceEvent, + WorkforceHandler, + WorkforceHandlerExport +} from './types.js'; + +export interface StartRunnerOptions { + /** Parsed persona JSON. Required. */ + persona: PersonaSpec; + /** + * Default-exported handler from the bundled `agent.ts`. The runner + * accepts both a branded `WorkforceHandlerExport` (preferred) and a raw + * function (when the bundle was authored before the `handler()` wrapper + * existed). + */ + handler: WorkforceHandlerExport | WorkforceHandler; + /** + * Workspace identifier. Resolved from `WORKFORCE_WORKSPACE_ID` env when + * not supplied. The runner refuses to start without one. + */ + workspaceId?: string; + /** + * Subsystem overrides. Most callers leave these unset; the deploy + * package's mode-specific entry points (`runDev`, `runSandbox`) supply + * the wired-up versions. Tests pass in-memory fakes here. + */ + subsystems?: Partial>; + /** + * Source of raw envelopes to dispatch. The default reads NDJSON from + * stdin so a parent process can write `RawGatewayEnvelope` lines and + * read structured logs back on stdout — useful both in `--mode dev` and + * inside a Daytona sandbox where stdin/stdout are the simplest contract. + */ + envelopes?: AsyncIterable; + /** + * Harness runner. Required because spawning a harness inside a sandbox + * is mode-specific (Daytona exec vs local child_process). When omitted, + * `ctx.harness.run` throws a clear error. + */ + harnessRunner?: (args: HarnessRunArgs) => Promise; +} + +const HARNESS_UNAVAILABLE: (args: HarnessRunArgs) => Promise = async () => { + throw new Error( + 'ctx.harness.run is unavailable: this runner was started without a harnessRunner. Use `workforce deploy --mode sandbox` to run inside Daytona, or supply a harnessRunner via StartRunnerOptions.' + ); +}; + +const PROCESS_FS_SANDBOX: SandboxContext = { + cwd: process.cwd(), + async exec() { + throw new Error( + 'ctx.sandbox.exec is unavailable: this runner was started without a SandboxContext. Use `workforce deploy --mode sandbox` to enable a Daytona sandbox.' + ); + }, + async readFile() { + throw new Error( + 'ctx.sandbox.readFile is unavailable: this runner was started without a SandboxContext.' + ); + }, + async writeFile() { + throw new Error( + 'ctx.sandbox.writeFile is unavailable: this runner was started without a SandboxContext.' + ); + } +}; + +/** + * Cold-start the agent. Returns a promise that resolves once the envelope + * stream completes (in production this is essentially "never", since the + * stream is a long-lived gateway WebSocket). + * + * The runner: + * 1. Validates the handler is callable (branded or raw function). + * 2. Builds a `WorkforceCtx` once, reused across invocations. + * 3. Iterates the envelope stream, shims each envelope into a + * `WorkforceEvent`, dispatches to the handler. + * 4. Catches handler errors, logs them with full attribution. The outer + * retry/backoff lives at the deploy layer (mode-specific), so this + * function doesn't attempt redelivery. + */ +export async function startRunner(options: StartRunnerOptions): Promise { + const handlerFn: WorkforceHandler = options.handler as WorkforceHandler; + if (typeof handlerFn !== 'function') { + throw new TypeError('startRunner: options.handler must be a function'); + } + if (!isWorkforceHandler(handlerFn)) { + // Soft warning, not an error — power users who import `@agent-relay/agent` + // directly may export a raw function that still satisfies the shape. + process.stderr.write( + '[workforce-runtime] handler is not branded with `handler()` — accepting raw function, but prefer `export default handler(fn)`.\n' + ); + } + + const workspaceId = options.workspaceId ?? process.env.WORKFORCE_WORKSPACE_ID; + if (!workspaceId) { + throw new Error( + 'startRunner: workspaceId is required (pass via options or set WORKFORCE_WORKSPACE_ID)' + ); + } + + const ctx = buildCtx({ + persona: options.persona, + workspaceId, + sandbox: options.subsystems?.sandbox ?? PROCESS_FS_SANDBOX, + harnessRunner: options.harnessRunner ?? HARNESS_UNAVAILABLE, + ...(options.subsystems?.llm ? { llm: options.subsystems.llm } : {}), + ...(options.subsystems?.memory ? { memory: options.subsystems.memory } : {}), + ...(options.subsystems?.workflow ? { workflow: options.subsystems.workflow } : {}), + ...(options.subsystems?.schedule ? { schedule: options.subsystems.schedule } : {}), + ...(options.subsystems?.log ? { log: options.subsystems.log } : {}), + ...(options.subsystems?.integrations ? { integrations: options.subsystems.integrations } : {}) + }); + + ctx.log('info', 'runner.started', { + persona: options.persona.id, + workspaceId, + schedules: options.persona.schedules?.map((s) => s.name) ?? [], + integrations: options.persona.integrations ? Object.keys(options.persona.integrations) : [] + }); + + const stream = options.envelopes ?? readEnvelopesFromStdin(); + for await (const raw of stream) { + const event = shimEnvelope(raw); + if (!event) { + ctx.log('warn', 'runner.envelope.unsupported', { rawId: raw.id, rawType: raw.type }); + continue; + } + await dispatch(ctx, handlerFn, event); + } + + ctx.log('info', 'runner.envelope-stream.ended', { persona: options.persona.id }); +} + +async function dispatch( + ctx: Parameters[0], + fn: WorkforceHandler, + event: WorkforceEvent +): Promise { + const t0 = Date.now(); + try { + await fn(ctx, event); + ctx.log('info', 'runner.handler.ok', { + eventId: event.id, + source: event.source, + type: event.source === 'cron' ? 'cron.tick' : event.type, + durationMs: Date.now() - t0 + }); + } catch (err) { + ctx.log('error', 'runner.handler.error', { + eventId: event.id, + source: event.source, + type: event.source === 'cron' ? 'cron.tick' : event.type, + attempt: event.attempt, + durationMs: Date.now() - t0, + error: err instanceof Error ? err.message : String(err), + stack: err instanceof Error ? err.stack : undefined + }); + // Surface the failure to the outer process so the deploy layer can + // retry. Throwing here would tear down the for-await loop; the deploy + // layer reads the structured log line above instead. + } +} + +async function* readEnvelopesFromStdin(): AsyncGenerator { + // Lazily parse NDJSON from stdin so the runner can be driven by any + // parent process that can pipe envelope lines. Each line is a JSON + // object; malformed lines are logged and skipped. + const decoder = new TextDecoder(); + let buffer = ''; + + for await (const chunk of process.stdin) { + const text = typeof chunk === 'string' ? chunk : decoder.decode(chunk as Buffer); + buffer += text; + let nl = buffer.indexOf('\n'); + while (nl !== -1) { + const line = buffer.slice(0, nl).trim(); + buffer = buffer.slice(nl + 1); + if (line.length > 0) { + try { + const parsed = JSON.parse(line) as RawGatewayEnvelope; + yield parsed; + } catch (err) { + process.stderr.write( + `[workforce-runtime] failed to parse envelope line: ${err instanceof Error ? err.message : String(err)}\n` + ); + } + } + nl = buffer.indexOf('\n'); + } + } + + // Drain any trailing line that lacked a terminating newline. + const tail = buffer.trim(); + if (tail.length > 0) { + try { + yield JSON.parse(tail) as RawGatewayEnvelope; + } catch { + /* ignore */ + } + } +} diff --git a/packages/runtime/src/shim.test.ts b/packages/runtime/src/shim.test.ts new file mode 100644 index 0000000..575fd40 --- /dev/null +++ b/packages/runtime/src/shim.test.ts @@ -0,0 +1,91 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { shimEnvelope } from './shim.js'; +import { handler, isWorkforceHandler } from './handler.js'; + +test('shimEnvelope translates a cron.tick envelope', () => { + const ev = shimEnvelope({ + id: 'evt-1', + workspace: 'ws-acme', + type: 'cron.tick', + occurredAt: '2026-05-12T09:00:00Z', + name: 'weekly', + cron: '0 9 * * 6' + }); + assert.ok(ev); + assert.equal(ev.source, 'cron'); + if (ev.source !== 'cron') return; + assert.equal(ev.name, 'weekly'); + assert.equal(ev.cron, '0 9 * * 6'); + assert.equal(ev.workspaceId, 'ws-acme'); + assert.equal(ev.attempt, 1); +}); + +test('shimEnvelope translates a provider envelope with summary', () => { + const ev = shimEnvelope({ + id: 'evt-7', + workspace: 'ws-acme', + type: 'github.pull_request.opened', + occurredAt: '2026-05-12T10:00:00Z', + attempt: 2, + resource: { pr: { number: 42 } }, + summary: { title: 'Add deploy', actor: 'kgnt' } + }); + assert.ok(ev); + if (ev.source === 'cron') { + assert.fail('expected provider event, got cron'); + } + assert.equal(ev.source, 'github'); + assert.equal(ev.type, 'pull_request.opened'); + assert.equal(ev.attempt, 2); + assert.deepEqual(ev.summary, { title: 'Add deploy', actor: 'kgnt' }); +}); + +test('shimEnvelope falls back to attempt=1 and a generated occurredAt when missing', () => { + const before = Date.now(); + const ev = shimEnvelope({ + id: 'evt-x', + workspace: 'ws-a', + type: 'linear.issue.created', + occurredAt: undefined as unknown as string + }); + assert.ok(ev); + if (ev.source === 'cron') return; + assert.equal(ev.attempt, 1); + const occurredAtMs = Date.parse(ev.occurredAt); + assert.ok(Number.isFinite(occurredAtMs)); + assert.ok(occurredAtMs >= before - 1000); +}); + +test('shimEnvelope returns null for unknown sources and malformed envelopes', () => { + assert.equal( + shimEnvelope({ id: 'e', workspace: 'w', type: 'mystery.event.fired', occurredAt: 'x' }), + null + ); + assert.equal(shimEnvelope({ id: '', workspace: 'w', type: 'cron.tick', occurredAt: 'x' }), null); + assert.equal(shimEnvelope({ id: 'e', workspace: '', type: 'cron.tick', occurredAt: 'x' }), null); + assert.equal(shimEnvelope({ id: 'e', workspace: 'w', type: '', occurredAt: 'x' }), null); +}); + +test('handler() brands a function and round-trips identity', () => { + let called = false; + const fn = handler(async () => { + called = true; + }); + assert.equal(typeof fn, 'function'); + assert.equal(isWorkforceHandler(fn), true); + assert.equal(isWorkforceHandler(() => {}), false); + assert.equal(isWorkforceHandler('not a fn'), false); + // Marker is non-enumerable so persona authors don't see it in iteration. + assert.equal(Object.keys(fn).length, 0); + // Identity: handler(f) returns the same callable f. + fn({} as never, {} as never); + assert.equal(called, true); +}); + +test('handler() rejects non-function inputs', () => { + // @ts-expect-error intentional misuse + assert.throws(() => handler('nope'), /expects a function/); + // @ts-expect-error intentional misuse + assert.throws(() => handler(undefined), /expects a function/); +}); diff --git a/packages/runtime/src/shim.ts b/packages/runtime/src/shim.ts new file mode 100644 index 0000000..631cec9 --- /dev/null +++ b/packages/runtime/src/shim.ts @@ -0,0 +1,101 @@ +import type { + WorkforceCronEvent, + WorkforceEvent, + WorkforceEventSource, + WorkforceProviderEvent +} from './types.js'; + +/** + * Raw envelope shape the cloud proactive-runtime gateway delivers per M1 + * spec (`cloud-proactive-runtime-spec/docs/proactive-runtime/spec.md`). + * Kept as a structural type rather than imported from `@agent-relay/agent` + * so the runtime compiles even when that package isn't yet published. + * + * Once `@agent-relay/agent` ships, the shim swaps its internal envelope + * type for the SDK's published one — call sites stay unchanged. + */ +export interface RawGatewayEnvelope { + id: string; + workspace: string; + /** Dotted type like `cron.tick`, `github.pull_request.opened`. */ + type: string; + occurredAt: string; + attempt?: number; + resource?: unknown; + summary?: Record; + expand?: unknown; + digest?: string; + /** Cron-only: the schedule name. */ + name?: string; + /** Cron-only: the schedule's cron expression. */ + cron?: string; +} + +type ProviderSource = Exclude; + +const PROVIDER_SOURCES: ReadonlySet = new Set([ + 'github', + 'linear', + 'slack', + 'notion', + 'jira' +]); + +function isProviderSource(value: string): value is ProviderSource { + return PROVIDER_SOURCES.has(value as ProviderSource); +} + +/** + * Translate a raw gateway envelope into a discriminated WorkforceEvent. + * + * Returns `null` for envelope shapes the v1 runtime does not yet know how + * to dispatch — the caller logs and acks (we don't want to crash-loop the + * runner on an envelope from a newer gateway). + */ +export function shimEnvelope(env: RawGatewayEnvelope): WorkforceEvent | null { + if (typeof env.id !== 'string' || !env.id) return null; + if (typeof env.workspace !== 'string' || !env.workspace) return null; + if (typeof env.type !== 'string' || !env.type) return null; + + const attempt = typeof env.attempt === 'number' && env.attempt > 0 ? env.attempt : 1; + const occurredAt = typeof env.occurredAt === 'string' ? env.occurredAt : new Date().toISOString(); + + if (env.type === 'cron.tick' || env.type.startsWith('cron.')) { + const cron: WorkforceCronEvent = { + source: 'cron', + id: env.id, + occurredAt, + attempt, + workspaceId: env.workspace, + name: typeof env.name === 'string' ? env.name : extractCronName(env.type), + cron: typeof env.cron === 'string' ? env.cron : '' + }; + return cron; + } + + // Provider envelopes are typed as `.` — e.g. + // `github.pull_request.opened`. Split once on the first dot. + const firstDot = env.type.indexOf('.'); + if (firstDot <= 0) return null; + const providerCandidate = env.type.slice(0, firstDot); + if (!isProviderSource(providerCandidate)) return null; + + const providerEvent: WorkforceProviderEvent = { + source: providerCandidate, + id: env.id, + occurredAt, + attempt, + workspaceId: env.workspace, + type: env.type.slice(firstDot + 1), + payload: env.resource ?? null, + ...(env.summary ? { summary: env.summary } : {}) + }; + return providerEvent; +} + +function extractCronName(typeStr: string): string { + // Accepts both `cron.tick` (no name) and `cron.tick:` form + // observed in some adapter outputs. + const colon = typeStr.indexOf(':'); + return colon > 0 ? typeStr.slice(colon + 1) : ''; +} diff --git a/packages/runtime/src/types.ts b/packages/runtime/src/types.ts new file mode 100644 index 0000000..d215c2a --- /dev/null +++ b/packages/runtime/src/types.ts @@ -0,0 +1,222 @@ +import type { + PersonaSpec, + PersonaTier, + PersonaMemoryScope +} from '@agentworkforce/persona-kit'; +import type { GithubClient } from './clients/github.js'; + +/** + * Source of an event delivered to a persona's `onEvent` handler. The + * runtime narrows the rest of the envelope based on this discriminator. + * + * Sources today: `cron` (schedule tick), the Tier-1 Relayfile providers + * (`github`, `linear`, `slack`, `notion`, `jira`). Additional sources land + * as cloud proactive-runtime milestones M2/M3 ship. + */ +export type WorkforceEventSource = + | 'cron' + | 'github' + | 'linear' + | 'slack' + | 'notion' + | 'jira'; + +/** Common envelope fields every event carries, regardless of source. */ +interface WorkforceEventBase { + /** Stable, idempotency-safe identifier; the runtime dedupes on this. */ + id: string; + /** ISO timestamp the event fired at the source (not at delivery). */ + occurredAt: string; + /** Delivery attempt count, 1 for first delivery. */ + attempt: number; + /** Workspace this event is scoped to. */ + workspaceId: string; +} + +export interface WorkforceCronEvent extends WorkforceEventBase { + source: 'cron'; + /** Schedule name as declared in the persona's `schedules[].name`. */ + name: string; + /** The persona's resolved cron expression for the schedule. */ + cron: string; +} + +/** Provider-specific event payload — kept loose for v1. */ +export interface WorkforceProviderEvent extends WorkforceEventBase { + source: Exclude; + /** Provider-normalized event name (e.g. `pull_request.opened`). */ + type: string; + /** Raw provider payload, normalized by the Relayfile adapter. */ + payload: unknown; + /** Optional summary the gateway computed (M2). Missing on M1. */ + summary?: { + title?: string; + status?: string; + actor?: string; + [key: string]: unknown; + }; +} + +export type WorkforceEvent = WorkforceCronEvent | WorkforceProviderEvent; + +/** + * Result of a harness invocation. The runtime translates whatever the + * underlying harness streamed into this minimal shape. + */ +export interface HarnessRunResult { + /** Final stdout/output from the harness. */ + output: string; + /** Process exit code; 0 on success. */ + exitCode: number; + /** Wall-clock duration in milliseconds. */ + durationMs: number; +} + +export interface HarnessRunArgs { + /** Prompt or task description handed to the harness. */ + prompt: string; + /** Working directory inside the sandbox; defaults to ctx.sandbox.cwd. */ + cwd?: string; + /** Which persona tier to use; defaults to the persona's `defaultTier`. */ + tier?: PersonaTier; + /** Override or extend the persona's `inputs` for this run. */ + inputs?: Record; + /** Environment overrides merged on top of the persona's `env`. */ + env?: Record; +} + +export interface SandboxExecArgs { + cwd?: string; + env?: Record; + timeoutMs?: number; +} + +export interface SandboxExecResult { + output: string; + exitCode: number; +} + +export interface SandboxContext { + /** Absolute path the runner sees as its working tree. */ + cwd: string; + exec(cmd: string, opts?: SandboxExecArgs): Promise; + readFile(path: string): Promise; + writeFile(path: string, contents: string): Promise; +} + +export interface MemorySaveOptions { + tags?: string[]; + scope?: PersonaMemoryScope; + /** Optional expiry in milliseconds from now. */ + expiresInMs?: number; +} + +export interface MemoryRecallOptions { + limit?: number; + scopes?: PersonaMemoryScope[]; + tags?: string[]; +} + +export interface MemoryItem { + id: string; + content: string; + tags: string[]; + scope: PersonaMemoryScope; + createdAt: string; +} + +export interface MemoryContext { + save(content: string, opts?: MemorySaveOptions): Promise; + recall(query: string, opts?: MemoryRecallOptions): Promise; +} + +export interface WorkflowRunHandle { + runId: string; + completion(): Promise<{ output: unknown; status: 'success' | 'failure' }>; +} + +export interface WorkflowContext { + run(name: string, args?: Record): Promise; + status(runId: string): Promise<{ status: 'pending' | 'success' | 'failure'; output?: unknown; error?: string }>; +} + +export interface ScheduleContext { + at(when: Date, payload: unknown): Promise; + cancel(name: string): Promise; +} + +/** + * Minimal LLM context for handlers that want raw inference (without + * spawning the persona's full harness). Backed by either workforce-billed + * tokens or the user's connected subscription, per the persona's + * `useSubscription` flag. + */ +export interface LlmContext { + complete(prompt: string, opts?: { maxTokens?: number; tier?: PersonaTier }): Promise; +} + +/** + * Per-integration clients attached to the ctx. `github` is concrete today; + * `linear`/`slack`/`notion`/`jira` are typed as `unknown` until they ship + * — handlers narrow them with a runtime check (`if (ctx.linear)`) and + * cast against the future client interface. Adding a typed client is a + * one-file change here, no breaking change for personas already on the + * runtime. + */ +export interface IntegrationClients { + github?: GithubClient; + linear?: unknown; + slack?: unknown; + notion?: unknown; + jira?: unknown; +} + +/** + * The context object handlers receive on every event invocation. Per- + * integration fields are populated only for providers the persona + * declared in `integrations`. Cron-only personas get a context with all + * integration fields undefined. + */ +export interface WorkforceCtx extends IntegrationClients { + /** Read-only persona metadata, useful for branching on traits/tier. */ + readonly persona: PersonaSpec; + /** Workspace the agent is deployed into. */ + readonly workspaceId: string; + /** Logical agent name (defaults to `persona.id`). */ + readonly agentName: string; + /** Raw inference, billed or subscription-backed per persona config. */ + llm: LlmContext; + /** Spawn the persona's harness inside the sandbox. */ + harness: { + run(args: HarnessRunArgs): Promise; + }; + /** Sandbox shell + filesystem. */ + sandbox: SandboxContext; + /** Persistent memory (no-op when persona.memory is false or unset). */ + memory: MemoryContext; + /** Cloud workflows invocation (HTTP). */ + workflow: WorkflowContext; + /** Schedule one-off follow-up ticks. */ + schedule: ScheduleContext; + /** Structured logger; every line is forwarded to the gateway. */ + log: (level: 'debug' | 'info' | 'warn' | 'error', message: string, attrs?: Record) => void; +} + +/** + * Default-export shape from a persona's `onEvent` file. The runtime calls + * this with a fully-constructed ctx and the discriminated event. + * + * Errors thrown from `onEvent` are caught by the runtime, logged, and (per + * persona `options.onError` defaults) retried with backoff. + */ +export type WorkforceHandler = (ctx: WorkforceCtx, event: WorkforceEvent) => Promise | void; + +/** + * Public type returned by `handler(...)`. Identity at runtime; the wrapper + * exists for type narrowing + future-proofing (we may add metadata, e.g. + * declared capabilities, to the returned function later). + */ +export interface WorkforceHandlerExport { + (ctx: WorkforceCtx, event: WorkforceEvent): Promise | void; + readonly __workforceHandler: true; +} diff --git a/packages/runtime/tsconfig.json b/packages/runtime/tsconfig.json new file mode 100644 index 0000000..df59da5 --- /dev/null +++ b/packages/runtime/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist" + }, + "include": ["src/**/*.ts"] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 47347ed..9f7a133 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -26,6 +26,9 @@ importers: packages/cli: dependencies: + '@agentworkforce/deploy': + specifier: workspace:* + version: link:../deploy '@agentworkforce/persona-kit': specifier: workspace:* version: link:../persona-kit @@ -42,6 +45,21 @@ importers: specifier: ^9.4.0 version: 9.4.0 + packages/deploy: + dependencies: + '@agentworkforce/persona-kit': + specifier: workspace:* + version: link:../persona-kit + '@agentworkforce/runtime': + specifier: workspace:* + version: link:../runtime + '@daytonaio/sdk': + specifier: ^0.148.0 + version: 0.148.0(ws@8.20.0) + esbuild: + specifier: ^0.25.0 + version: 0.25.12 + packages/persona-kit: dependencies: '@relayfile/local-mount': @@ -50,6 +68,12 @@ importers: packages/personas-core: {} + packages/runtime: + dependencies: + '@agentworkforce/persona-kit': + specifier: workspace:* + version: link:../persona-kit + packages/workload-router: dependencies: '@agentworkforce/persona-kit': @@ -58,6 +82,171 @@ importers: packages: + '@aws-crypto/crc32@5.2.0': + resolution: {integrity: sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/crc32c@5.2.0': + resolution: {integrity: sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==} + + '@aws-crypto/sha1-browser@5.2.0': + resolution: {integrity: sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==} + + '@aws-crypto/sha256-browser@5.2.0': + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + + '@aws-crypto/sha256-js@5.2.0': + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/supports-web-crypto@5.2.0': + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + + '@aws-crypto/util@5.2.0': + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + + '@aws-sdk/client-s3@3.1045.0': + resolution: {integrity: sha512-fsuO3Y6t+3Ro9Bsg41DKj4Sfy53CGSrhnMldNplWmG8Tx0UbYk+YDa4RD1hVlJpERw4JBmPkl0+J9qlxMh1pcA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/core@3.974.8': + resolution: {integrity: sha512-njR2qoG6ZuB0kvAS2FyICsFZJ6gmCcf2X/7JcD14sUvGDm26wiZ5BrA6LOiUxKFEF+IVe7kdroxyE00YlkiYsw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/crc64-nvme@3.972.7': + resolution: {integrity: sha512-QUagVVBbC8gODCF6e1aV0mE2TXWB9Opz4k8EJFdNrujUVQm5R4AjJa1mpOqzwOuROBzqJU9zawzig7M96L8Ejg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-env@3.972.34': + resolution: {integrity: sha512-XT0jtf8Fw9JE6ppsQeoNnZRiG+jqRixMT1v1ZR17G60UvVdsQmTG8nbEyHuEPfMxDXEhfdARaM/XiEhca4lGHQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-http@3.972.36': + resolution: {integrity: sha512-DPoGWfy7J7RKxvbf5kOKIGQkD2ek3dbKgzKIGrnLuvZBz5myU+Im/H6pmc14QcnFbqHMqxvtWSgRDSJW3qXLQg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-ini@3.972.38': + resolution: {integrity: sha512-oDzUBu2MGJFgoar05sPMCwSrhw44ASyccrHzj66vO69OZqi7I6hZZxXfuPLC8OCzW7C+sU+bI73XHij41yekgQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-login@3.972.38': + resolution: {integrity: sha512-g1NosS8qe4OF++G2UFCM5ovSkgipC7YYor5KCWatG0UoMSO5YFj9C8muePlyVmOBV/WTI16Jo3/s1NUo/o1Bww==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-node@3.972.39': + resolution: {integrity: sha512-HEswDQyxUtadoZ/bJsPPENHg7R0Lzym5LuMksJeHvqhCOpP+rtkDLKI4/ZChH4w3cf5kG8n6bZuI8PzajoiqMg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-process@3.972.34': + resolution: {integrity: sha512-T3IFs4EVmVi1dVN5RciFnklCANSzvrQd/VuHY9ThHSQmYkTogjcGkoJEr+oNUPQZnso52183088NqysMPji1/Q==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-sso@3.972.38': + resolution: {integrity: sha512-5ZxG+t0+3Q3QPh8KEjX6syskhgNf7I0MN7oGioTf6Lm1NTjfP7sIcYGNsthXC2qR8vcD3edNZwCr2ovfSSWuRA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.972.38': + resolution: {integrity: sha512-lYHFF30DGI20jZcYX8cm6Ns0V7f1dDN6g/MBDLTyD/5iw+bXs3yBr2iAiHDkx4RFU5JgsnZvCHYKiRVPRdmOgw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/lib-storage@3.1045.0': + resolution: {integrity: sha512-F7dp2ST/83Iz6JTMMmUYEXxg7R1JDewfvzJeWWiDTwe0vLsg67JTN7OsBe6G8DWYbLQ+EyPWkyc3oFnxOjCVfg==} + engines: {node: '>=20.0.0'} + peerDependencies: + '@aws-sdk/client-s3': ^3.1045.0 + + '@aws-sdk/middleware-bucket-endpoint@3.972.10': + resolution: {integrity: sha512-Vbc2frZH7wXlMNd+ZZSXUEs/l1Sv8Jj4zUnIfwrYF5lwaLdXHZ9xx4U3rjUcaye3HRhFVc+E5DbBxpRAbB16BA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-expect-continue@3.972.10': + resolution: {integrity: sha512-2Yn0f1Qiq/DjxYR3wfI3LokXnjOhFM7Ssn4LTdFDIxRMCE6I32MAsVnhPX1cUZsuVA9tiZtwwhlSLAtFGxAZlQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-flexible-checksums@3.974.16': + resolution: {integrity: sha512-6ru8doI0/XzszqLIPXf0E/V7HhAw1Pu94010XCKYtBUfD0LxF0BuOzrUf8OQGR6j2o6wgKTHUniOmndQycHwCA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-host-header@3.972.10': + resolution: {integrity: sha512-IJSsIMeVQ8MMCPbuh1AbltkFhLBLXn7aejzfX5YKT/VLDHn++Dcz8886tXckE+wQssyPUhaXrJhdakO2VilRhg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-location-constraint@3.972.10': + resolution: {integrity: sha512-rI3NZvJcEvjoD0+0PI0iUAwlPw2IlSlhyvgBK/3WkKJQE/YiKFedd9dMN2lVacdNxPNhxL/jzQaKQdrGtQagjQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-logger@3.972.10': + resolution: {integrity: sha512-OOuGvvz1Dm20SjZo5oEBePFqxt5nf8AwkNDSyUHvD9/bfNASmstcYxFAHUowy4n6Io7mWUZ04JURZwSBvyQanQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.972.11': + resolution: {integrity: sha512-+zz6f79Kj9V5qFK2P+D8Ehjnw4AhphAlCAsPjUqEcInA9umtSSKMrHbSagEeOIsDNuvVrH98bjRHcyQukTrhaQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-sdk-s3@3.972.37': + resolution: {integrity: sha512-Km7M+i8DrLArVzrid1gfxeGhYHBd3uxvE77g0s5a52zPSVosxzQBnJ0gwWb6NIp/DOk8gsBMhi7V+cpJG0ndTA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-ssec@3.972.10': + resolution: {integrity: sha512-Gli9A0u8EVVb+5bFDGS/QbSVg28w/wpEidg1ggVcSj65BDTdGR6punsOcVjqdiu1i42WHWo51MCvARPIIz9juw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-user-agent@3.972.38': + resolution: {integrity: sha512-iz+B29TXcAZsJpwB+AwG/TTGA5l/VnmMZ2UxtiySOZjI6gCdmviXPwdgzcmuazMy16rXoPY4mYCGe7zdNKfx5A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/nested-clients@3.997.6': + resolution: {integrity: sha512-WBDnqatJl+kGObpfmfSxqnXeYTu3Me8wx8WCtvoxX3pfWrrTv8I4WTMSSs7PZqcRcVh8WeUKMgGFjMG+52SR1w==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/region-config-resolver@3.972.13': + resolution: {integrity: sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/signature-v4-multi-region@3.996.25': + resolution: {integrity: sha512-+CMIt3e1VzlklAECmG+DtP1sV8iKq25FuA0OKpnJ4KA0kxUtd7CgClY7/RU6VzJBQwbN4EJ9Ue6plvqx1qGadw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/token-providers@3.1041.0': + resolution: {integrity: sha512-Th7kPI6YPtvJUcdznooXJMy+9rQWjmEF81LxaJssngBzuysK4a/x+l8kjm1zb7nYsUPbndnBdUnwng/3PLvtGw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/types@3.973.8': + resolution: {integrity: sha512-gjlAdtHMbtR9X5iIhVUvbVcy55KnznpC6bkDUWW9z915bi0ckdUr5cjf16Kp6xq0bP5HBD2xzgbL9F9Quv5vUw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-arn-parser@3.972.3': + resolution: {integrity: sha512-HzSD8PMFrvgi2Kserxuff5VitNq2sgf3w9qxmskKDiDTThWfVteJxuCS9JXiPIPtmCrp+7N9asfIaVhBFORllA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-endpoints@3.996.8': + resolution: {integrity: sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-locate-window@3.965.5': + resolution: {integrity: sha512-WhlJNNINQB+9qtLtZJcpQdgZw3SCDCpXdUJP7cToGwHbCWCnRckGlc6Bx/OhWwIYFNAn+FIydY8SZ0QmVu3xTQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-user-agent-browser@3.972.10': + resolution: {integrity: sha512-FAzqXvfEssGdSIz8ejatan0bOdx1qefBWKF/gWmVBXIP1HkS7v/wjjaqrAGGKvyihrXTXW00/2/1nTJtxpXz7g==} + + '@aws-sdk/util-user-agent-node@3.973.24': + resolution: {integrity: sha512-ZWwlkjcIp7cEL8ZfTpTAPNkwx25p7xol0xlKoWVVf22+nsjwmLcHYtTPjIV1cSpmB/b6DaK4cb1fSkvCXHgRdw==} + engines: {node: '>=20.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@aws-sdk/xml-builder@3.972.22': + resolution: {integrity: sha512-PMYKKtJd70IsSG0yHrdAbxBr+ZWBKLvzFZfD3/urxgf6hXVMzuU5M+3MJ5G67RpOmLBu1fAUN65SbWuKUCOlAA==} + engines: {node: '>=20.0.0'} + + '@aws/lambda-invoke-store@0.2.4': + resolution: {integrity: sha512-iY8yvjE0y651BixKNPgmv1WrQc+GZ142sb0z4gYnChDDY2YqI4P/jsSopBWrKfAt7LOJAkOXt7rC/hms+WclQQ==} + engines: {node: '>=18.0.0'} + '@clack/core@0.3.5': resolution: {integrity: sha512-5cfhQNH+1VQ2xLQlmzXMqUoiaH0lRBq9/CLW9lTyMbuKLC3+xEK01tHVvyut++mLOn5urSHmkm6I0Lg9MaJSTQ==} @@ -66,6 +255,391 @@ packages: bundledDependencies: - is-unicode-supported + '@daytonaio/api-client@0.148.0': + resolution: {integrity: sha512-5/1fvSF69SEaKluddVB6QxVAyRI6LEsUim7m4oSJCtB5UfIuq1b/k5svTkFbi1dag1UJEypr8xw7eTaVFvnBMA==} + + '@daytonaio/sdk@0.148.0': + resolution: {integrity: sha512-/h+cJnIVuX0xXit9kyxrUh5c1Xf//7Xk5xyf4ULPhzT5xpBGakZnaCNKb0AvS4kpaK1mU75+kT/onxGlc6wMGw==} + + '@daytonaio/toolbox-api-client@0.148.0': + resolution: {integrity: sha512-lLhl6XHCLYWTtI7/uH57+AEp8nS6q/Y1tgZg686qFrDW6lIdhTF7vVViGd4Z9XfmdoVSlMpV/2I2UnyNryWDtQ==} + + '@esbuild/aix-ppc64@0.25.12': + resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.25.12': + resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.25.12': + resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.25.12': + resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.25.12': + resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.25.12': + resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.25.12': + resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.25.12': + resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.25.12': + resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.25.12': + resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.25.12': + resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.25.12': + resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.25.12': + resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.25.12': + resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.25.12': + resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.25.12': + resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.25.12': + resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.25.12': + resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.25.12': + resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.25.12': + resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.25.12': + resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.25.12': + resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.25.12': + resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.25.12': + resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.25.12': + resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.25.12': + resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.8.1': + resolution: {integrity: sha512-wtF6h+DY6M3YaDBPAmvuuA6jV8Sif9MjtOI5euKFWRgCDl5PeDpPsHR9u2l6St5ceY8AZgoNDww5+HvEsXFsGg==} + engines: {node: '>=6'} + hasBin: true + + '@iarna/toml@2.2.5': + resolution: {integrity: sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==} + + '@isaacs/fs-minipass@4.0.1': + resolution: {integrity: sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==} + engines: {node: '>=18.0.0'} + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@nodable/entities@2.1.0': + resolution: {integrity: sha512-nyT7T3nbMyBI/lvr6L5TyWbFJAI9FTgVRakNoBqCD+PmID8DzFrrNdLLtHMwMszOtqZa8PAOV24ZqDnQrhQINA==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@opentelemetry/api-logs@0.207.0': + resolution: {integrity: sha512-lAb0jQRVyleQQGiuuvCOTDVspc14nx6XJjP4FspJ1sNARo3Regq4ZZbrc3rN4b1TYSuUCvgH+UXUPug4SLOqEQ==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/api@1.9.1': + resolution: {integrity: sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/context-async-hooks@2.2.0': + resolution: {integrity: sha512-qRkLWiUEZNAmYapZ7KGS5C4OmBLcP/H2foXeOEaowYCR0wi89fHejrfYfbuLVCMLp/dWZXKvQusdbUEZjERfwQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@2.2.0': + resolution: {integrity: sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@2.7.1': + resolution: {integrity: sha512-QAqIj32AtK6+pEVNG7EOVxHdE06RP+FM5qpiEJ4RtDcFIqKUZHYhl7/7UY5efhwmwNAg7j8QbJVBLxMerc0+gw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-logs-otlp-grpc@0.207.0': + resolution: {integrity: sha512-K92RN+kQGTMzFDsCzsYNGqOsXRUnko/Ckk+t/yPJao72MewOLgBUTWVHhebgkNfRCYqDz1v3K0aPT9OJkemvgg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-logs-otlp-http@0.207.0': + resolution: {integrity: sha512-JpOh7MguEUls8eRfkVVW3yRhClo5b9LqwWTOg8+i4gjr/+8eiCtquJnC7whvpTIGyff06cLZ2NsEj+CVP3Mjeg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-logs-otlp-proto@0.207.0': + resolution: {integrity: sha512-RQJEV/K6KPbQrIUbsrRkEe0ufks1o5OGLHy6jbDD8tRjeCsbFHWfg99lYBRqBV33PYZJXsigqMaAbjWGTFYzLw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-grpc@0.207.0': + resolution: {integrity: sha512-6flX89W54gkwmqYShdcTBR1AEF5C1Ob0O8pDgmLPikTKyEv27lByr9yBmO5WrP0+5qJuNPHrLfgFQFYi6npDGA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-http@0.207.0': + resolution: {integrity: sha512-fG8FAJmvXOrKXGIRN8+y41U41IfVXxPRVwyB05LoMqYSjugx/FSBkMZUZXUT/wclTdmBKtS5MKoi0bEKkmRhSw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-proto@0.207.0': + resolution: {integrity: sha512-kDBxiTeQjaRlUQzS1COT9ic+et174toZH6jxaVuVAvGqmxOkgjpLOjrI5ff8SMMQE69r03L3Ll3nPKekLopLwg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-prometheus@0.207.0': + resolution: {integrity: sha512-Y5p1s39FvIRmU+F1++j7ly8/KSqhMmn6cMfpQqiDCqDjdDHwUtSq0XI0WwL3HYGnZeaR/VV4BNmsYQJ7GAPrhw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-grpc@0.207.0': + resolution: {integrity: sha512-7u2ZmcIx6D4KG/+5np4X2qA0o+O0K8cnUDhR4WI/vr5ZZ0la9J9RG+tkSjC7Yz+2XgL6760gSIM7/nyd3yaBLA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-http@0.207.0': + resolution: {integrity: sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-proto@0.207.0': + resolution: {integrity: sha512-ruUQB4FkWtxHjNmSXjrhmJZFvyMm+tBzHyMm7YPQshApy4wvZUTcrpPyP/A/rCl/8M4BwoVIZdiwijMdbZaq4w==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-zipkin@2.2.0': + resolution: {integrity: sha512-VV4QzhGCT7cWrGasBWxelBjqbNBbyHicWWS/66KoZoe9BzYwFB72SH2/kkc4uAviQlO8iwv2okIJy+/jqqEHTg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/instrumentation-http@0.207.0': + resolution: {integrity: sha512-FC4i5hVixTzuhg4SV2ycTEAYx+0E2hm+GwbdoVPSA6kna0pPVI4etzaA9UkpJ9ussumQheFXP6rkGIaFJjMxsw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation@0.207.0': + resolution: {integrity: sha512-y6eeli9+TLKnznrR8AZlQMSJT7wILpXH+6EYq5Vf/4Ao+huI7EedxQHwRgVUOMLFbe7VFDvHJrX9/f4lcwnJsA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-exporter-base@0.207.0': + resolution: {integrity: sha512-4RQluMVVGMrHok/3SVeSJ6EnRNkA2MINcX88sh+d/7DjGUrewW/WT88IsMEci0wUM+5ykTpPPNbEOoW+jwHnbw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.207.0': + resolution: {integrity: sha512-eKFjKNdsPed4q9yYqeI5gBTLjXxDM/8jwhiC0icw3zKxHVGBySoDsed5J5q/PGY/3quzenTr3FiTxA3NiNT+nw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-transformer@0.207.0': + resolution: {integrity: sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/propagator-b3@2.2.0': + resolution: {integrity: sha512-9CrbTLFi5Ee4uepxg2qlpQIozoJuoAZU5sKMx0Mn7Oh+p7UrgCiEV6C02FOxxdYVRRFQVCinYR8Kf6eMSQsIsw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/propagator-jaeger@2.2.0': + resolution: {integrity: sha512-FfeOHOrdhiNzecoB1jZKp2fybqmqMPJUXe2ZOydP7QzmTPYcfPeuaclTLYVhK3HyJf71kt8sTl92nV4YIaLaKA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/resources@2.2.0': + resolution: {integrity: sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/resources@2.7.1': + resolution: {integrity: sha512-DeT6KKolmC4e/dRQvMQ/RwlnzhaqeiFOXY5ngoOPJ07GgVVKxZOg9EcrNZb5aTzUn+iCrJldAgOfQm1O/QfPAQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.207.0': + resolution: {integrity: sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + + '@opentelemetry/sdk-metrics@2.2.0': + resolution: {integrity: sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.9.0 <1.10.0' + + '@opentelemetry/sdk-node@0.207.0': + resolution: {integrity: sha512-hnRsX/M8uj0WaXOBvFenQ8XsE8FLVh2uSnn1rkWu4mx+qu7EKGUZvZng6y/95cyzsqOfiaDDr08Ek4jppkIDNg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@2.2.0': + resolution: {integrity: sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@2.7.1': + resolution: {integrity: sha512-NAYIlsF8MPUsKqJMiDQJTMPOmlbawC1Iz/omMLygZ1C9am8fTKYjTaI+OZM+WTY3t3Glo0wnOg/6/pac6RGPPw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-node@2.2.0': + resolution: {integrity: sha512-+OaRja3f0IqGG2kptVeYsrZQK9nKRSpfFrKtRBq4uh6nIB8bTBgaGvYQrQoRrQWQMA5dK5yLhDMDc0dvYvCOIQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.40.0': + resolution: {integrity: sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw==} + engines: {node: '>=14'} + '@parcel/watcher-android-arm64@2.5.6': resolution: {integrity: sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==} engines: {node: '>= 10.0.0'} @@ -148,6 +722,36 @@ packages: resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==} engines: {node: '>= 10.0.0'} + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.5': + resolution: {integrity: sha512-zgXFLzW3Ap33e6d0Wlj4MGIm6Ce8O89n/apUaGNB/jx+hw+ruWEp7EwGUshdLKVRCxZW12fp9r40E1mQrf/34g==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.1': + resolution: {integrity: sha512-mnzgDV26ueAvk7rsbt9L7bE0SuAoqyuys/sMMrmVcN5x9VsxpcG3rqAUSgDyLp0UZlmNfIbQ4fHfCtreVBk8Ew==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.1': + resolution: {integrity: sha512-oOAWABowe8EAbMyWKM0tYDKi8Yaox52D+HWZhAIJqQXbqe0xI/GV7FhLWqlEKreMkfDjshR5FKgi3mnle0h6Eg==} + '@relayburn/sdk-darwin-arm64@2.5.2': resolution: {integrity: sha512-OSoUGplSTaoDgfM46r0u198IHEvZFPSvDDfv+FMOXy+TEaloqK/M1iDOy9sY/TWjNCRY/gvILp5fcFwYcuCdIg==} engines: {node: '>=22'} @@ -180,22 +784,242 @@ packages: resolution: {integrity: sha512-kkqSA0J711J4um/IIoDf9e99qrUHEPG3glKscyVmm3h30JZ0PTfUzBNJ2KsHGoZUPtT6jIXIYdswIFb12/Cluw==} engines: {node: '>=18'} + '@smithy/config-resolver@4.5.1': + resolution: {integrity: sha512-abXk3LhODsvRHsk0ZS9ztrg/fZatTa9Z/z4pgx65YSLR+rY6kvUG/1IgcDKEUciR8MfdnkT5oPeHJTy/HhzDIQ==} + engines: {node: '>=18.0.0'} + + '@smithy/core@3.24.1': + resolution: {integrity: sha512-3mT7o4qQyUWttYnVK3A0Z/u3Xha3E81tXn32Tz6vjZiUXhBrkEivpw1hBYfh84iFF9CSzkBU9Y1DJ3Q6RQ231g==} + engines: {node: '>=18.0.0'} + + '@smithy/credential-provider-imds@4.3.1': + resolution: {integrity: sha512-0S/acwHnqX4WrjXzhdiDRxsG2s9SC0cpPIK9nZ1R6UOHd+j7uL28+4bHu22urbLk2TVw3fkp6na/+fkUt/pLNQ==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-browser@4.3.1': + resolution: {integrity: sha512-X7MyI1fu8M84IPKk49kO4kb27Mqp6un9/0o/MsA1ngZ5OxxWKGUxPS3S/AJ9q1cPVTSGmRcbaGNfGUSsflTJkg==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-config-resolver@4.4.1': + resolution: {integrity: sha512-JZGbSXaBk7JY8VPzsh66ksJ0nTWXbApduFDkA/pEl3aTm2EoAiUZE1Iltp6c+X1bB8kxPQW0mHDfVdYCpWTOzg==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-node@4.3.1': + resolution: {integrity: sha512-6Cn4xTNVxn9PWTHSbvf8zmcDhQW8lrLE1Xq5CJgmX6wEvdjS2S0KuE79Aiznv/jx51jpFJ98OuWyE+Bt+oG1MQ==} + engines: {node: '>=18.0.0'} + + '@smithy/fetch-http-handler@5.4.1': + resolution: {integrity: sha512-r7bN6spQ+caZC8AnyvSxkRUb57zt2jhhRw3Z+2Ez8hjq6coIikDBFUUI/+CQ1xx9K6eX1Gx6wUKo4ylU66TIqw==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-blob-browser@4.3.1': + resolution: {integrity: sha512-2fbltQVQYmGd0OzPv2oDMRF0pxkzeIx8cbpx2x6W3UJWGaEyUzVPxF4d0sDXZ/r2obg+RbTyhTidXWlPDsKRKw==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-node@4.3.1': + resolution: {integrity: sha512-u0/zo11mg7yNneoYgTkH4sXwSmcBpbl49o4UNCtQ7hYsXxynsN25KYHmXzqi7TPk5HQL5klGnpU5koOY0O+9hw==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-stream-node@4.3.1': + resolution: {integrity: sha512-4NOnngIoXngbJw9By3u8KXRgqt4vYATpAobNBnNWxOREP7JY3kB0bUmbBNhZ7dtZV/b4auO1eFMD4cLj9OauVg==} + engines: {node: '>=18.0.0'} + + '@smithy/invalid-dependency@4.3.1': + resolution: {integrity: sha512-cLmwtDoulyZvRepAfyV+3rx5oMvuh51dbE+6En3vGC09j3uVSRt1U4oguNu32ub3soGX0oYtBs8E7S2Q4SxTqg==} + engines: {node: '>=18.0.0'} + + '@smithy/is-array-buffer@2.2.0': + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + + '@smithy/is-array-buffer@4.3.1': + resolution: {integrity: sha512-9aVG6VjOFVFHC6Z4hGAzIIrsVWpp1QOO4ERQ2k1S19VrgCamUGIBE2ilAnMWCfr+mlowHlLRXBStsTk/2c5HfA==} + engines: {node: '>=18.0.0'} + + '@smithy/md5-js@4.3.1': + resolution: {integrity: sha512-98NalujRdzv6ggVQNYPWpL2K57UKeUB8roIr61u6+JiHd7KUlMQ+sn/vk6IG4XxEjw2vlC7eu/xjYXshUE4XXg==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-content-length@4.3.1': + resolution: {integrity: sha512-l4BUIP+wljW/Ar+0/QcGdmElI9lalrywfzNijXMBG34Z510FRzPyrDLx/blNTZOAm0C4Mvx5t/bf760CZo1ajg==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-endpoint@4.5.1': + resolution: {integrity: sha512-qtqu5TS+8Y18ZDkJoiXN5AMW1G4JAg1+xytzpsUvIR5a4EUsgd5HQg12lekEHWpm2TDUmOgg+hBaHK7dvyWdkA==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-retry@4.6.1': + resolution: {integrity: sha512-eTaQhxs0rfUuAkL2MSKrH8DTO7YCeAgrdN0B2/RAeuHmXQ+x52dk5qUBsi/jtcqe5LxItgq5AG5tI6Cp8c0sow==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-serde@4.3.1': + resolution: {integrity: sha512-t7YtUe076zWVypVmy1rX91oKi2TFJCkpfFpfMhJFpEIRPP0iL9JxjeSyFQ+1bF45JUfDzOzslUJa150WcSrBug==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-stack@4.3.1': + resolution: {integrity: sha512-1jKwiKZxCMQNqmp4uVPYA6r+MLGjEtH07gnOUdPgbnjuOIrl/0JY/ICdpQtFgeBsQ/Up01gnSv8GYEL0fb8yvg==} + engines: {node: '>=18.0.0'} + + '@smithy/node-config-provider@4.4.1': + resolution: {integrity: sha512-q7tDJEJXcaSG/8TVpu2f2l9bzxTzDM9geWmltbzsY6Hfh3yiuXXTpLIO8+zwYASPPVFaTJpdKwjSSjdoDoccgw==} + engines: {node: '>=18.0.0'} + + '@smithy/node-http-handler@4.7.1': + resolution: {integrity: sha512-BdEYko85f/ldp68uH8XEyIvo810xFk6eyPH81SRggTOApYHWA+Xu7B2EzLuHbe37WVLaUA7F1fWR3/zBeme2WA==} + engines: {node: '>=18.0.0'} + + '@smithy/property-provider@4.3.1': + resolution: {integrity: sha512-3NHoqVBhzpY2b4YBx9AqyKC4C8nnEjl5FyKuxrCjvnjinG0ODj+yg1xX360nNahT6wghYjSw1SooCt3kIdnqIA==} + engines: {node: '>=18.0.0'} + + '@smithy/protocol-http@5.4.1': + resolution: {integrity: sha512-8irPNCQgYxcSFp1aGcnDNFkTwSA+xPUaFq9V/v1+JXWu8sKr5b3cFmg2kBTkjkvypDmGeNffuNu0x5iqw1NoAw==} + engines: {node: '>=18.0.0'} + + '@smithy/shared-ini-file-loader@4.5.1': + resolution: {integrity: sha512-FKoKxVzdFPhyynFI+SPTWrgOP60fZ4l1UwukWYj4eyhpSmEI7MJ6p58hawIIt9bwp+aek9NEm8Zika7E+GEoeg==} + engines: {node: '>=18.0.0'} + + '@smithy/signature-v4@5.4.1': + resolution: {integrity: sha512-728lZZEWYWubBESrfntNslZQYDKRlJDY4dcDnYbL50+gu35pGPLblu4S0/RH/RDLF6me1M87ECHsHELGL7dA/Q==} + engines: {node: '>=18.0.0'} + + '@smithy/smithy-client@4.13.1': + resolution: {integrity: sha512-IcznNM8Qd9u1X3oflp12tkzyOB4HbT+sfYWlWiyEysgNzSHoWcHUUsTT4y1jjDjtVuuVVQbYks+g1kVd7u1eGQ==} + engines: {node: '>=18.0.0'} + + '@smithy/types@4.14.1': + resolution: {integrity: sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg==} + engines: {node: '>=18.0.0'} + + '@smithy/url-parser@4.3.1': + resolution: {integrity: sha512-tuelFlF2PZR/wogFC58NIrPOv+Zna4N1+3kA161/33D1Gbwvl6Nh4WsAsW05ZyPp0O6CMGsdbb0S2b/qVjRMCw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-base64@4.4.1': + resolution: {integrity: sha512-fTHiwW2xbiRiWzfSk4IGAr3gNZCH4fuRYqt8+IuarsP/YON35576iVdePraZ6yJlFxlCL0eMec3/F7xYqoKzlg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-browser@4.3.1': + resolution: {integrity: sha512-1scg5t4nV3hV7CZs996/XHb80aDZ5YotH4NcvkW/w/rHj+cSz0aCIzwz8aUNKB4nCDPSHRCbrKoj+TvycYefmw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-node@4.3.1': + resolution: {integrity: sha512-VRC8MKVPKrgUYThTA7ughcKMfjW6/X92H0wXGJoda0Apw4O5xbXL0GMLz40DTWlsb5hh2iItk6+XL72uJdxYcw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-buffer-from@2.2.0': + resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + + '@smithy/util-config-provider@4.3.1': + resolution: {integrity: sha512-lw6L5GF5+W19vO6o3fZwRT2cXEG+8b2LH0b9ppjDT6nIxjUgmljEQGninx5XorylwKZZ4XLVABeroJ8oaF9RmQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-browser@4.4.1': + resolution: {integrity: sha512-1rA7w+LjK1WJClsffC81Z/ZtjFt22QsKhBjUYEnZsGVS2nOTfOENKBzdg4SxhdwFvBCjcbpjscUfXOPwE3UHWQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-node@4.3.1': + resolution: {integrity: sha512-1fk1wfQHBenQD5NitVKOFgW0wsISYAFPIXGyStJWAeCtMyRhgHYvtJxBk2rwGWA0L5QX6oM6yeHSLKPFMk59ww==} + engines: {node: '>=18.0.0'} + + '@smithy/util-endpoints@3.5.1': + resolution: {integrity: sha512-yORYzJD5zoGbSDkAACr0dIjDiSEA3X8h8lggDENl1dkKpCG0TQIoItPBqtvuJHzFFjRXumcoH+/09xIuixGyCw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-middleware@4.3.1': + resolution: {integrity: sha512-SRRMDcIgVXVhVbxviBaSZbuWuVW3jD08wv4ESV0V2oiw0Mki8TPVQ5IxwD3MvSTPg52QYsRP+JoMw5WdUdeWAg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-retry@4.4.1': + resolution: {integrity: sha512-qkgWgwn1xw0GoY9Ea/B6FrYSPfHA0zyOtJkokwxZuvucRf2+2lfTut6adi4e4Y7LEAaxsFG7r6i05mtDCxbHKA==} + engines: {node: '>=18.0.0'} + + '@smithy/util-stream@4.6.1': + resolution: {integrity: sha512-GjZfEft0M0V3n2YM/LGkr5LeLd8gxHUIzW0rUz6VtTtlAq245GxHlJghvoPEjJHKTj255iHFAiA4IsIdK40Ueg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-utf8@2.3.0': + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + + '@smithy/util-utf8@4.3.1': + resolution: {integrity: sha512-FtRrSnriXtOs4+J8/y9SbQ1xmN71hrOsN/YJr5PQQj5nR1l7YNkGS/TEk4gr0WN7gyrUqw8/RFaYVjI18732ZA==} + engines: {node: '>=18.0.0'} + + '@smithy/util-waiter@4.4.1': + resolution: {integrity: sha512-G/gWDykZNL0NVcd1qXkoKm45jxJECp6q53DSomM5QKMsyAMEsGksVq+HwgonqYxfFJEzzHi6ljtWKXVS1pl0/Q==} + engines: {node: '>=18.0.0'} + '@types/node@22.19.15': resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==} + acorn-import-attributes@1.9.5: + resolution: {integrity: sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==} + peerDependencies: + acorn: ^8 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + agent-trajectories@0.5.3: resolution: {integrity: sha512-Yb/aVdxE1uWdSWWoXocFSpg1yriLlz/aEMVj4f6RktMFBCqPK3ZbNutTiWxGzBtv7swId7ivoWK9wTYxRd3/Kg==} engines: {node: '>=20.0.0'} hasBin: true + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + ansi-regex@6.2.2: resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} engines: {node: '>=12'} + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.16.0: + resolution: {integrity: sha512-6hp5CwvTPlN2A31g5dxnwAX0orzM7pmCRDLnZSX772mv8WDqICwFjowHuPs04Mc8deIld1+ejhtaMn5vp6b+1w==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + bowser@2.14.1: + resolution: {integrity: sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + buffer@5.6.0: + resolution: {integrity: sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==} + + busboy@1.6.0: + resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} + engines: {node: '>=10.16.0'} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + chalk@5.6.2: resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + chownr@3.0.0: + resolution: {integrity: sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==} + engines: {node: '>=18'} + + cjs-module-lexer@2.2.0: + resolution: {integrity: sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==} + cli-cursor@5.0.0: resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} engines: {node: '>=18'} @@ -204,26 +1028,184 @@ packages: resolution: {integrity: sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw==} engines: {node: '>=18.20'} - commander@12.1.0: - resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + commander@12.1.0: + resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} engines: {node: '>=18'} + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + detect-libc@2.1.2: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} + dotenv@17.4.2: + resolution: {integrity: sha512-nI4U3TottKAcAD9LLud4Cb7b2QztQMUEfHbvhTH09bqXTxnSie8WnjPALV/WMCrJZ6UV/qHJ6L03OqO3LcdYZw==} + engines: {node: '>=12'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + esbuild@0.25.12: + resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} + engines: {node: '>=18'} + hasBin: true + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + + expand-tilde@2.0.2: + resolution: {integrity: sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw==} + engines: {node: '>=0.10.0'} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fast-xml-builder@1.2.0: + resolution: {integrity: sha512-00aAWieqff+ZJhsXA4g1g7M8k+7AYoMUUHF+/zFb5U6Uv/P0Vl4QZo84/IcufzYalLuEj9928bXN9PbbFzMF0Q==} + + fast-xml-parser@5.7.2: + resolution: {integrity: sha512-P7oW7tLbYnhOLQk/Gv7cZgzgMPP/XN03K02/Jy6Y/NHzyIAIpxuZIM/YqAkfiXFPxA2CTm7NtCijK9EDu09u2w==} + hasBin: true + + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + follow-redirects@1.16.0: + resolution: {integrity: sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + forwarded-parse@2.1.2: + resolution: {integrity: sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==} + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + get-east-asian-width@1.5.0: resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==} engines: {node: '>=18'} + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.3: + resolution: {integrity: sha512-ej4AhfhfL2Q2zpMmLo7U1Uv9+PyhIZpgQLGT1F9miIGmiCJIoCgSmczFdrc97mWT4kVY72KA+WnnhJ5pghSvSg==} + engines: {node: '>= 0.4'} + + homedir-polyfill@1.0.3: + resolution: {integrity: sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==} + engines: {node: '>=0.10.0'} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + ignore@7.0.5: resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} engines: {node: '>= 4'} + import-in-the-middle@2.0.6: + resolution: {integrity: sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==} + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -232,18 +1214,67 @@ packages: resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} engines: {node: '>=12'} + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + is-unicode-supported@2.1.0: resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} engines: {node: '>=18'} + isomorphic-ws@5.0.0: + resolution: {integrity: sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==} + peerDependencies: + ws: '*' + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + log-symbols@7.0.1: resolution: {integrity: sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg==} engines: {node: '>=18'} + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + mimic-function@5.0.1: resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} engines: {node: '>=18'} + minipass@7.1.3: + resolution: {integrity: sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==} + engines: {node: '>=16 || 14 >=14.17'} + + minizlib@3.1.0: + resolution: {integrity: sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==} + engines: {node: '>= 18'} + + module-details-from-path@1.0.4: + resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + node-addon-api@7.1.1: resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} @@ -255,17 +1286,69 @@ packages: resolution: {integrity: sha512-84cglkRILFxdtA8hAvLNdMrtBpPNBTrQ9/ulg0FA7xLMnD6mifv+enAIeRmvtv+WgdCE+LPGOfQmtJRrVaIVhQ==} engines: {node: '>=20'} + parse-passwd@1.0.0: + resolution: {integrity: sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==} + engines: {node: '>=0.10.0'} + + path-expression-matcher@1.5.0: + resolution: {integrity: sha512-cbrerZV+6rvdQrrD+iGMcZFEiiSrbv9Tfdkvnusy6y0x0GKBXREFg/Y65GhIfm0tnLntThhzCnfKwp1WRjeCyQ==} + engines: {node: '>=14.0.0'} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + picomatch@2.3.2: + resolution: {integrity: sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==} + engines: {node: '>=8.6'} + picomatch@4.0.4: resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==} engines: {node: '>=12'} + protobufjs@7.5.7: + resolution: {integrity: sha512-NGnrxS/nLKUo5nkbVQxlC71sB4hdfImdYIbFeSCidxtwATx0AHRPcANSLd0q5Bb2BkoSWo2iisQhGg5/r+ihbA==} + engines: {node: '>=12.0.0'} + + proxy-from-env@2.1.0: + resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==} + engines: {node: '>=10'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + require-in-the-middle@8.0.1: + resolution: {integrity: sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ==} + engines: {node: '>=9.3.0 || >=8.10.0 <9.0.0'} + restore-cursor@5.1.0: resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} engines: {node: '>=18'} + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -277,41 +1360,981 @@ packages: resolution: {integrity: sha512-eCPu1qRxPVkl5605OTWF8Wz40b4Mf45NY5LQmVPQ599knfs5QhASUm9GbJ5BDMDOXgrnh0wyEdvzmL//YMlw0A==} engines: {node: '>=18'} + stream-browserify@3.0.0: + resolution: {integrity: sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==} + + streamsearch@1.1.0: + resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} + engines: {node: '>=10.0.0'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + string-width@8.2.0: resolution: {integrity: sha512-6hJPQ8N0V0P3SNmP6h2J99RLuzrWz2gvT7VnK5tKvrNqJoyS9W4/Fb8mo31UiPvy00z7DQXkP2hnKBVav76thw==} engines: {node: '>=20'} + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + strip-ansi@7.2.0: resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} engines: {node: '>=12'} + strnum@2.3.0: + resolution: {integrity: sha512-ums3KNd42PGyx5xaoVTO1mjU1bH3NpY4vsrVlnv9PNGqQj8wd7rJ6nEypLrJ7z5vxK5RP0yMLo6J/Gsm62DI5Q==} + + tar@7.5.15: + resolution: {integrity: sha512-dzGK0boVlC4W5QFuQN1EFSl3bIDYsk7Tj40U6eIBnK2k/8ml7TZ5agbI5j5+qnoVcAA+rNtBml8SEiLxZpNqRQ==} + engines: {node: '>=18'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + typescript@5.9.3: resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} engines: {node: '>=14.17'} hasBin: true - undici-types@6.21.0: - resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + ws@8.20.0: + resolution: {integrity: sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + xml-naming@0.1.0: + resolution: {integrity: sha512-k8KO9hrMyNk6tUWqUfkTEZbezRRpONVOzUTnc97VnCvyj6Tf9lyUR9EDAIeiVLv56jsMcoXEwjW8Kv5yPY52lw==} + engines: {node: '>=16.0.0'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@5.0.0: + resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==} + engines: {node: '>=18'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yoctocolors@2.1.2: + resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==} + engines: {node: '>=18'} + + zod@3.25.76: + resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} + +snapshots: + + '@aws-crypto/crc32@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/crc32c@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/sha1-browser@5.2.0': + dependencies: + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-locate-window': 3.965.5 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-crypto/sha256-browser@5.2.0': + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-locate-window': 3.965.5 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-crypto/sha256-js@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/supports-web-crypto@5.2.0': + dependencies: + tslib: 2.8.1 + + '@aws-crypto/util@5.2.0': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-sdk/client-s3@3.1045.0': + dependencies: + '@aws-crypto/sha1-browser': 5.2.0 + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/credential-provider-node': 3.972.39 + '@aws-sdk/middleware-bucket-endpoint': 3.972.10 + '@aws-sdk/middleware-expect-continue': 3.972.10 + '@aws-sdk/middleware-flexible-checksums': 3.974.16 + '@aws-sdk/middleware-host-header': 3.972.10 + '@aws-sdk/middleware-location-constraint': 3.972.10 + '@aws-sdk/middleware-logger': 3.972.10 + '@aws-sdk/middleware-recursion-detection': 3.972.11 + '@aws-sdk/middleware-sdk-s3': 3.972.37 + '@aws-sdk/middleware-ssec': 3.972.10 + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/region-config-resolver': 3.972.13 + '@aws-sdk/signature-v4-multi-region': 3.996.25 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@aws-sdk/util-user-agent-browser': 3.972.10 + '@aws-sdk/util-user-agent-node': 3.973.24 + '@smithy/config-resolver': 4.5.1 + '@smithy/core': 3.24.1 + '@smithy/eventstream-serde-browser': 4.3.1 + '@smithy/eventstream-serde-config-resolver': 4.4.1 + '@smithy/eventstream-serde-node': 4.3.1 + '@smithy/fetch-http-handler': 5.4.1 + '@smithy/hash-blob-browser': 4.3.1 + '@smithy/hash-node': 4.3.1 + '@smithy/hash-stream-node': 4.3.1 + '@smithy/invalid-dependency': 4.3.1 + '@smithy/md5-js': 4.3.1 + '@smithy/middleware-content-length': 4.3.1 + '@smithy/middleware-endpoint': 4.5.1 + '@smithy/middleware-retry': 4.6.1 + '@smithy/middleware-serde': 4.3.1 + '@smithy/middleware-stack': 4.3.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/node-http-handler': 4.7.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.3.1 + '@smithy/util-base64': 4.4.1 + '@smithy/util-body-length-browser': 4.3.1 + '@smithy/util-body-length-node': 4.3.1 + '@smithy/util-defaults-mode-browser': 4.4.1 + '@smithy/util-defaults-mode-node': 4.3.1 + '@smithy/util-endpoints': 3.5.1 + '@smithy/util-middleware': 4.3.1 + '@smithy/util-retry': 4.4.1 + '@smithy/util-stream': 4.6.1 + '@smithy/util-utf8': 4.3.1 + '@smithy/util-waiter': 4.4.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.974.8': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws-sdk/xml-builder': 3.972.22 + '@smithy/core': 3.24.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/property-provider': 4.3.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/signature-v4': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + '@smithy/util-base64': 4.4.1 + '@smithy/util-middleware': 4.3.1 + '@smithy/util-retry': 4.4.1 + '@smithy/util-utf8': 4.3.1 + tslib: 2.8.1 + + '@aws-sdk/crc64-nvme@3.972.7': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-env@3.972.34': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-http@3.972.36': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/fetch-http-handler': 5.4.1 + '@smithy/node-http-handler': 4.7.1 + '@smithy/property-provider': 4.3.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + '@smithy/util-stream': 4.6.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-ini@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/credential-provider-env': 3.972.34 + '@aws-sdk/credential-provider-http': 3.972.36 + '@aws-sdk/credential-provider-login': 3.972.38 + '@aws-sdk/credential-provider-process': 3.972.34 + '@aws-sdk/credential-provider-sso': 3.972.38 + '@aws-sdk/credential-provider-web-identity': 3.972.38 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/credential-provider-imds': 4.3.1 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-login@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-node@3.972.39': + dependencies: + '@aws-sdk/credential-provider-env': 3.972.34 + '@aws-sdk/credential-provider-http': 3.972.36 + '@aws-sdk/credential-provider-ini': 3.972.38 + '@aws-sdk/credential-provider-process': 3.972.34 + '@aws-sdk/credential-provider-sso': 3.972.38 + '@aws-sdk/credential-provider-web-identity': 3.972.38 + '@aws-sdk/types': 3.973.8 + '@smithy/credential-provider-imds': 4.3.1 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-process@3.972.34': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-sso@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/token-providers': 3.1041.0 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/lib-storage@3.1045.0(@aws-sdk/client-s3@3.1045.0)': + dependencies: + '@aws-sdk/client-s3': 3.1045.0 + '@smithy/middleware-endpoint': 4.5.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + buffer: 5.6.0 + events: 3.3.0 + stream-browserify: 3.0.0 + tslib: 2.8.1 + + '@aws-sdk/middleware-bucket-endpoint@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-arn-parser': 3.972.3 + '@smithy/node-config-provider': 4.4.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.3.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-expect-continue@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-flexible-checksums@3.974.16': + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@aws-crypto/crc32c': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/crc64-nvme': 3.972.7 + '@aws-sdk/types': 3.973.8 + '@smithy/is-array-buffer': 4.3.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + '@smithy/util-middleware': 4.3.1 + '@smithy/util-stream': 4.6.1 + '@smithy/util-utf8': 4.3.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-host-header@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-location-constraint@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-logger@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-recursion-detection@3.972.11': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws/lambda-invoke-store': 0.2.4 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-sdk-s3@3.972.37': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-arn-parser': 3.972.3 + '@smithy/core': 3.24.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/signature-v4': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.3.1 + '@smithy/util-middleware': 4.3.1 + '@smithy/util-stream': 4.6.1 + '@smithy/util-utf8': 4.3.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-ssec@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-user-agent@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@smithy/core': 3.24.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/types': 4.14.1 + '@smithy/util-retry': 4.4.1 + tslib: 2.8.1 + + '@aws-sdk/nested-clients@3.997.6': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/middleware-host-header': 3.972.10 + '@aws-sdk/middleware-logger': 3.972.10 + '@aws-sdk/middleware-recursion-detection': 3.972.11 + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/region-config-resolver': 3.972.13 + '@aws-sdk/signature-v4-multi-region': 3.996.25 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@aws-sdk/util-user-agent-browser': 3.972.10 + '@aws-sdk/util-user-agent-node': 3.973.24 + '@smithy/config-resolver': 4.5.1 + '@smithy/core': 3.24.1 + '@smithy/fetch-http-handler': 5.4.1 + '@smithy/hash-node': 4.3.1 + '@smithy/invalid-dependency': 4.3.1 + '@smithy/middleware-content-length': 4.3.1 + '@smithy/middleware-endpoint': 4.5.1 + '@smithy/middleware-retry': 4.6.1 + '@smithy/middleware-serde': 4.3.1 + '@smithy/middleware-stack': 4.3.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/node-http-handler': 4.7.1 + '@smithy/protocol-http': 5.4.1 + '@smithy/smithy-client': 4.13.1 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.3.1 + '@smithy/util-base64': 4.4.1 + '@smithy/util-body-length-browser': 4.3.1 + '@smithy/util-body-length-node': 4.3.1 + '@smithy/util-defaults-mode-browser': 4.4.1 + '@smithy/util-defaults-mode-node': 4.3.1 + '@smithy/util-endpoints': 3.5.1 + '@smithy/util-middleware': 4.3.1 + '@smithy/util-retry': 4.4.1 + '@smithy/util-utf8': 4.3.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/region-config-resolver@3.972.13': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/config-resolver': 4.5.1 + '@smithy/node-config-provider': 4.4.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/signature-v4-multi-region@3.996.25': + dependencies: + '@aws-sdk/middleware-sdk-s3': 3.972.37 + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.4.1 + '@smithy/signature-v4': 5.4.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/token-providers@3.1041.0': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.3.1 + '@smithy/shared-ini-file-loader': 4.5.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/types@3.973.8': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/util-arn-parser@3.972.3': + dependencies: + tslib: 2.8.1 + + '@aws-sdk/util-endpoints@3.996.8': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.3.1 + '@smithy/util-endpoints': 3.5.1 + tslib: 2.8.1 + + '@aws-sdk/util-locate-window@3.965.5': + dependencies: + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-browser@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + bowser: 2.14.1 + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-node@3.973.24': + dependencies: + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/types': 3.973.8 + '@smithy/node-config-provider': 4.4.1 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.3.1 + tslib: 2.8.1 + + '@aws-sdk/xml-builder@3.972.22': + dependencies: + '@nodable/entities': 2.1.0 + '@smithy/types': 4.14.1 + fast-xml-parser: 5.7.2 + tslib: 2.8.1 + + '@aws/lambda-invoke-store@0.2.4': {} + + '@clack/core@0.3.5': + dependencies: + picocolors: 1.1.1 + sisteransi: 1.0.5 + + '@clack/prompts@0.7.0': + dependencies: + '@clack/core': 0.3.5 + picocolors: 1.1.1 + sisteransi: 1.0.5 + + '@daytonaio/api-client@0.148.0': + dependencies: + axios: 1.16.0 + transitivePeerDependencies: + - debug + + '@daytonaio/sdk@0.148.0(ws@8.20.0)': + dependencies: + '@aws-sdk/client-s3': 3.1045.0 + '@aws-sdk/lib-storage': 3.1045.0(@aws-sdk/client-s3@3.1045.0) + '@daytonaio/api-client': 0.148.0 + '@daytonaio/toolbox-api-client': 0.148.0 + '@iarna/toml': 2.2.5 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/exporter-trace-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/instrumentation-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-node': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.7.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + axios: 1.16.0 + busboy: 1.6.0 + dotenv: 17.4.2 + expand-tilde: 2.0.2 + fast-glob: 3.3.3 + form-data: 4.0.5 + isomorphic-ws: 5.0.0(ws@8.20.0) + pathe: 2.0.3 + shell-quote: 1.8.3 + tar: 7.5.15 + transitivePeerDependencies: + - aws-crt + - debug + - supports-color + - ws + + '@daytonaio/toolbox-api-client@0.148.0': + dependencies: + axios: 1.16.0 + transitivePeerDependencies: + - debug + + '@esbuild/aix-ppc64@0.25.12': + optional: true + + '@esbuild/android-arm64@0.25.12': + optional: true + + '@esbuild/android-arm@0.25.12': + optional: true + + '@esbuild/android-x64@0.25.12': + optional: true + + '@esbuild/darwin-arm64@0.25.12': + optional: true + + '@esbuild/darwin-x64@0.25.12': + optional: true + + '@esbuild/freebsd-arm64@0.25.12': + optional: true + + '@esbuild/freebsd-x64@0.25.12': + optional: true + + '@esbuild/linux-arm64@0.25.12': + optional: true + + '@esbuild/linux-arm@0.25.12': + optional: true + + '@esbuild/linux-ia32@0.25.12': + optional: true + + '@esbuild/linux-loong64@0.25.12': + optional: true + + '@esbuild/linux-mips64el@0.25.12': + optional: true + + '@esbuild/linux-ppc64@0.25.12': + optional: true + + '@esbuild/linux-riscv64@0.25.12': + optional: true + + '@esbuild/linux-s390x@0.25.12': + optional: true + + '@esbuild/linux-x64@0.25.12': + optional: true + + '@esbuild/netbsd-arm64@0.25.12': + optional: true + + '@esbuild/netbsd-x64@0.25.12': + optional: true + + '@esbuild/openbsd-arm64@0.25.12': + optional: true + + '@esbuild/openbsd-x64@0.25.12': + optional: true + + '@esbuild/openharmony-arm64@0.25.12': + optional: true + + '@esbuild/sunos-x64@0.25.12': + optional: true + + '@esbuild/win32-arm64@0.25.12': + optional: true + + '@esbuild/win32-ia32@0.25.12': + optional: true + + '@esbuild/win32-x64@0.25.12': + optional: true + + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.1 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.8.1': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.7 + yargs: 17.7.2 + + '@iarna/toml@2.2.5': {} + + '@isaacs/fs-minipass@4.0.1': + dependencies: + minipass: 7.1.3 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@nodable/entities@2.1.0': {} + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.20.1 + + '@opentelemetry/api-logs@0.207.0': + dependencies: + '@opentelemetry/api': 1.9.1 + + '@opentelemetry/api@1.9.1': {} + + '@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + + '@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/semantic-conventions': 1.40.0 + + '@opentelemetry/core@2.7.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/semantic-conventions': 1.40.0 + + '@opentelemetry/exporter-logs-otlp-grpc@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.207.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-logs-otlp-http@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.207.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-logs-otlp-proto@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-grpc@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-http@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-proto@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-prometheus@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-grpc@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-http@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-proto@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-zipkin@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + + '@opentelemetry/instrumentation-http@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/instrumentation': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + forwarded-parse: 2.1.2 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + import-in-the-middle: 2.0.6 + require-in-the-middle: 8.0.1 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/otlp-exporter-base@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/otlp-grpc-exporter-base@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.207.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/otlp-transformer@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + protobufjs: 7.5.7 + + '@opentelemetry/propagator-b3@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/propagator-jaeger@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) - yoctocolors@2.1.2: - resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==} - engines: {node: '>=18'} + '@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 - zod@3.25.76: - resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} + '@opentelemetry/resources@2.7.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.7.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 -snapshots: + '@opentelemetry/sdk-logs@0.207.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) - '@clack/core@0.3.5': + '@opentelemetry/sdk-metrics@2.2.0(@opentelemetry/api@1.9.1)': dependencies: - picocolors: 1.1.1 - sisteransi: 1.0.5 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) - '@clack/prompts@0.7.0': + '@opentelemetry/sdk-node@0.207.0(@opentelemetry/api@1.9.1)': dependencies: - '@clack/core': 0.3.5 - picocolors: 1.1.1 - sisteransi: 1.0.5 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.207.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-grpc': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-proto': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-grpc': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-proto': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-prometheus': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-grpc': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-http': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-proto': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-zipkin': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/instrumentation': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/propagator-b3': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/propagator-jaeger': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.207.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-node': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + + '@opentelemetry/sdk-trace-base@2.7.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.7.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.7.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + + '@opentelemetry/sdk-trace-node@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/context-async-hooks': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + + '@opentelemetry/semantic-conventions@1.40.0': {} '@parcel/watcher-android-arm64@2.5.6': optional: true @@ -373,6 +2396,29 @@ snapshots: '@parcel/watcher-win32-ia32': 2.5.6 '@parcel/watcher-win32-x64': 2.5.6 + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.5': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.1 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.1': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.1': {} + '@relayburn/sdk-darwin-arm64@2.5.2': optional: true @@ -397,51 +2443,527 @@ snapshots: '@parcel/watcher': 2.5.6 ignore: 7.0.5 + '@smithy/config-resolver@4.5.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/core@3.24.1': + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/credential-provider-imds@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-browser@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-config-resolver@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-node@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/fetch-http-handler@5.4.1': + dependencies: + '@smithy/core': 3.24.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/hash-blob-browser@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/hash-node@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/hash-stream-node@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/invalid-dependency@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/is-array-buffer@2.2.0': + dependencies: + tslib: 2.8.1 + + '@smithy/is-array-buffer@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/md5-js@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/middleware-content-length@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/middleware-endpoint@4.5.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/middleware-retry@4.6.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/middleware-serde@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/middleware-stack@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/node-config-provider@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/node-http-handler@4.7.1': + dependencies: + '@smithy/core': 3.24.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/property-provider@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/protocol-http@5.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/shared-ini-file-loader@4.5.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/signature-v4@5.4.1': + dependencies: + '@smithy/core': 3.24.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/smithy-client@4.13.1': + dependencies: + '@smithy/core': 3.24.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/types@4.14.1': + dependencies: + tslib: 2.8.1 + + '@smithy/url-parser@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-base64@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-body-length-browser@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-body-length-node@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-buffer-from@2.2.0': + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.8.1 + + '@smithy/util-config-provider@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-defaults-mode-browser@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-defaults-mode-node@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-endpoints@3.5.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-middleware@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-retry@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-stream@4.6.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-utf8@2.3.0': + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.8.1 + + '@smithy/util-utf8@4.3.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + + '@smithy/util-waiter@4.4.1': + dependencies: + '@smithy/core': 3.24.1 + tslib: 2.8.1 + '@types/node@22.19.15': dependencies: undici-types: 6.21.0 + acorn-import-attributes@1.9.5(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + agent-trajectories@0.5.3: dependencies: '@clack/prompts': 0.7.0 commander: 12.1.0 zod: 3.25.76 + ansi-regex@5.0.1: {} + ansi-regex@6.2.2: {} + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + asynckit@0.4.0: {} + + axios@1.16.0: + dependencies: + follow-redirects: 1.16.0 + form-data: 4.0.5 + proxy-from-env: 2.1.0 + transitivePeerDependencies: + - debug + + base64-js@1.5.1: {} + + bowser@2.14.1: {} + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + buffer@5.6.0: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + busboy@1.6.0: + dependencies: + streamsearch: 1.1.0 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + chalk@5.6.2: {} + chownr@3.0.0: {} + + cjs-module-lexer@2.2.0: {} + cli-cursor@5.0.0: dependencies: restore-cursor: 5.1.0 cli-spinners@3.4.0: {} + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + commander@12.1.0: {} + debug@4.4.3: + dependencies: + ms: 2.1.3 + + delayed-stream@1.0.0: {} + detect-libc@2.1.2: {} + dotenv@17.4.2: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + emoji-regex@8.0.0: {} + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.3 + + esbuild@0.25.12: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.12 + '@esbuild/android-arm': 0.25.12 + '@esbuild/android-arm64': 0.25.12 + '@esbuild/android-x64': 0.25.12 + '@esbuild/darwin-arm64': 0.25.12 + '@esbuild/darwin-x64': 0.25.12 + '@esbuild/freebsd-arm64': 0.25.12 + '@esbuild/freebsd-x64': 0.25.12 + '@esbuild/linux-arm': 0.25.12 + '@esbuild/linux-arm64': 0.25.12 + '@esbuild/linux-ia32': 0.25.12 + '@esbuild/linux-loong64': 0.25.12 + '@esbuild/linux-mips64el': 0.25.12 + '@esbuild/linux-ppc64': 0.25.12 + '@esbuild/linux-riscv64': 0.25.12 + '@esbuild/linux-s390x': 0.25.12 + '@esbuild/linux-x64': 0.25.12 + '@esbuild/netbsd-arm64': 0.25.12 + '@esbuild/netbsd-x64': 0.25.12 + '@esbuild/openbsd-arm64': 0.25.12 + '@esbuild/openbsd-x64': 0.25.12 + '@esbuild/openharmony-arm64': 0.25.12 + '@esbuild/sunos-x64': 0.25.12 + '@esbuild/win32-arm64': 0.25.12 + '@esbuild/win32-ia32': 0.25.12 + '@esbuild/win32-x64': 0.25.12 + + escalade@3.2.0: {} + + events@3.3.0: {} + + expand-tilde@2.0.2: + dependencies: + homedir-polyfill: 1.0.3 + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-xml-builder@1.2.0: + dependencies: + path-expression-matcher: 1.5.0 + xml-naming: 0.1.0 + + fast-xml-parser@5.7.2: + dependencies: + '@nodable/entities': 2.1.0 + fast-xml-builder: 1.2.0 + path-expression-matcher: 1.5.0 + strnum: 2.3.0 + + fastq@1.20.1: + dependencies: + reusify: 1.1.0 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + follow-redirects@1.16.0: {} + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.3 + mime-types: 2.1.35 + + forwarded-parse@2.1.2: {} + + function-bind@1.1.2: {} + + get-caller-file@2.0.5: {} + get-east-asian-width@1.5.0: {} + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.3 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + gopd@1.2.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.3: + dependencies: + function-bind: 1.1.2 + + homedir-polyfill@1.0.3: + dependencies: + parse-passwd: 1.0.0 + + ieee754@1.2.1: {} + ignore@7.0.5: {} + import-in-the-middle@2.0.6: + dependencies: + acorn: 8.16.0 + acorn-import-attributes: 1.9.5(acorn@8.16.0) + cjs-module-lexer: 2.2.0 + module-details-from-path: 1.0.4 + + inherits@2.0.4: {} + is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-glob@4.0.3: dependencies: is-extglob: 2.1.1 is-interactive@2.0.0: {} + is-number@7.0.0: {} + is-unicode-supported@2.1.0: {} + isomorphic-ws@5.0.0(ws@8.20.0): + dependencies: + ws: 8.20.0 + + lodash.camelcase@4.3.0: {} + log-symbols@7.0.1: dependencies: is-unicode-supported: 2.1.0 yoctocolors: 2.1.2 + long@5.3.2: {} + + math-intrinsics@1.1.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.2 + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + mimic-function@5.0.1: {} + minipass@7.1.3: {} + + minizlib@3.1.0: + dependencies: + minipass: 7.1.3 + + module-details-from-path@1.0.4: {} + + ms@2.1.3: {} + node-addon-api@7.1.1: {} onetime@7.0.0: @@ -459,34 +2981,151 @@ snapshots: stdin-discarder: 0.3.2 string-width: 8.2.0 + parse-passwd@1.0.0: {} + + path-expression-matcher@1.5.0: {} + + pathe@2.0.3: {} + picocolors@1.1.1: {} + picomatch@2.3.2: {} + picomatch@4.0.4: {} + protobufjs@7.5.7: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.5 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.1 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.1 + '@types/node': 22.19.15 + long: 5.3.2 + + proxy-from-env@2.1.0: {} + + queue-microtask@1.2.3: {} + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + require-directory@2.1.1: {} + + require-in-the-middle@8.0.1: + dependencies: + debug: 4.4.3 + module-details-from-path: 1.0.4 + transitivePeerDependencies: + - supports-color + restore-cursor@5.1.0: dependencies: onetime: 7.0.0 signal-exit: 4.1.0 + reusify@1.1.0: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safe-buffer@5.2.1: {} + + shell-quote@1.8.3: {} + signal-exit@4.1.0: {} sisteransi@1.0.5: {} stdin-discarder@0.3.2: {} + stream-browserify@3.0.0: + dependencies: + inherits: 2.0.4 + readable-stream: 3.6.2 + + streamsearch@1.1.0: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + string-width@8.2.0: dependencies: get-east-asian-width: 1.5.0 strip-ansi: 7.2.0 + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + strip-ansi@7.2.0: dependencies: ansi-regex: 6.2.2 + strnum@2.3.0: {} + + tar@7.5.15: + dependencies: + '@isaacs/fs-minipass': 4.0.1 + chownr: 3.0.0 + minipass: 7.1.3 + minizlib: 3.1.0 + yallist: 5.0.0 + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tslib@2.8.1: {} + typescript@5.9.3: {} undici-types@6.21.0: {} + util-deprecate@1.0.2: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + ws@8.20.0: {} + + xml-naming@0.1.0: {} + + y18n@5.0.8: {} + + yallist@5.0.0: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + yoctocolors@2.1.2: {} zod@3.25.76: {} From 5e419df6842c35cac15b357987dae9eef9423eb7 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Tue, 12 May 2026 12:12:30 +0200 Subject: [PATCH 2/4] fix(deploy): address PR review feedback + unblock examples typecheck MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit CI fix - examples/tsconfig.json: add path mappings for @agentworkforce/runtime (incl. /runner, /clients, /raw) and @agentworkforce/persona-kit so example imports resolve against the workspace sources. Unblocks the failing `pnpm run typecheck:examples` step. Real bugs flagged in review - deploy/modes/dev.ts: SIGKILL escalation now checks exitCode/signalCode instead of child.killed — the latter flips true immediately after the initial SIGTERM, so the old check never escalated stuck children. - deploy/modes/sandbox.ts: drop the explicit `@latest` pin from the in-sandbox `npm install`; let npm resolve the version declared in the staged bundle's package.json so the running runtime matches the bundle's intended version. - runtime/shim.ts: reject envelopes like `github.` whose source is valid but whose event-name suffix is empty. Returns null so the runner logs the envelope as unsupported rather than dispatching an empty-typed event to the handler. - runtime/ctx.ts: refuse to attach integration clients that collide with core ctx fields (`harness`, `sandbox`, etc.). A malformed persona declaring an integration named `harness` no longer silently shadows `ctx.harness.run`. - runtime/clients/github.ts: route the diff fetch through the canonical `/repos/:owner/:repo/pulls/:number` endpoint via the authed request helper, instead of trusting `pr.diff_url`. Keeps the bearer token scoped to the configured API host. - persona-kit/parse.ts: reject Windows-absolute onEvent paths (`C:\...`, `C:/...`, `\\server\share\...`) alongside POSIX `/abs`. - persona-kit/parse.ts: trim schedule name/cron/tz before validating and deduping so `"weekly"` and `" weekly "` collapse correctly. - runtime/runner.ts: log the malformed-tail JSON parse failure with an excerpt instead of silently swallowing it. - deploy/io.ts: open a short-lived readline.Interface per question and close it in finally — keeps stdin from staying in raw mode and pinning the event loop open after the prompt completes. - deploy/login.ts: trim whitespace-only workspace/token env values to "missing" so they fail with a clear setup error instead of a confusing downstream 401. - deploy/connect.ts: drop the `'(already-connected)'` sentinel string; leave subscriptionProvider undefined when no connect ran. - cli/cli.ts: add explicit `return` after deploy/login handlers so the `subcommand !== 'agent'` check below doesn't fire wrongly. - cli/deploy-command.ts: expectValue now rejects flag tokens (`--workspace --detach` fails loudly rather than treating `--detach` as the workspace name). - persona-kit/triggers.ts: strip user-specific absolute path from the registry doc comment. Examples - examples/weekly-digest/agent.ts: validate WEEKLY_DIGEST_REPO is exactly `owner/repo` (two non-empty segments) before destructuring. - examples/weekly-digest/persona.json: drop the hardcoded GitHub scope.repo so the env-configurable WEEKLY_DIGEST_REPO stays the single source of truth. Tests - New regression coverage for: Windows-absolute onEvent rejection, schedule field trimming + dedupe across whitespace variants, shim null on empty provider-event suffix, buildCtx core-field collision guard, github.getPr fetching the diff through the configured API host instead of the untrusted diff_url. 379 tests pass (+18). Co-Authored-By: Claude Opus 4.7 (1M context) --- examples/tsconfig.json | 10 +++++- examples/weekly-digest/agent.ts | 9 +++-- examples/weekly-digest/persona.json | 4 +-- packages/cli/src/cli.ts | 2 ++ packages/cli/src/deploy-command.ts | 6 ++++ packages/deploy/src/connect.ts | 4 ++- packages/deploy/src/io.ts | 20 ++++++----- packages/deploy/src/login.ts | 8 +++-- packages/deploy/src/modes/dev.ts | 8 ++++- packages/deploy/src/modes/sandbox.ts | 10 +++--- packages/persona-kit/src/parse.test.ts | 37 +++++++++++++++++++ packages/persona-kit/src/parse.ts | 28 ++++++++++----- packages/persona-kit/src/triggers.ts | 7 ++-- packages/runtime/src/clients/github.test.ts | 34 ++++++++++++++++++ packages/runtime/src/clients/github.ts | 40 +++++++++------------ packages/runtime/src/ctx.ts | 23 ++++++++++++ packages/runtime/src/runner.test.ts | 15 ++++++++ packages/runtime/src/runner.ts | 13 +++++-- packages/runtime/src/shim.test.ts | 7 ++++ packages/runtime/src/shim.ts | 8 ++++- 20 files changed, 229 insertions(+), 64 deletions(-) diff --git a/examples/tsconfig.json b/examples/tsconfig.json index bdb1e3d..2eaae53 100644 --- a/examples/tsconfig.json +++ b/examples/tsconfig.json @@ -1,7 +1,15 @@ { "extends": "../tsconfig.base.json", "compilerOptions": { - "noEmit": true + "noEmit": true, + "baseUrl": ".", + "paths": { + "@agentworkforce/runtime": ["../packages/runtime/src/index.ts"], + "@agentworkforce/runtime/runner": ["../packages/runtime/src/runner.ts"], + "@agentworkforce/runtime/clients": ["../packages/runtime/src/clients/index.ts"], + "@agentworkforce/runtime/raw": ["../packages/runtime/src/raw.ts"], + "@agentworkforce/persona-kit": ["../packages/persona-kit/src/index.ts"] + } }, "include": ["./**/*.ts"] } diff --git a/examples/weekly-digest/agent.ts b/examples/weekly-digest/agent.ts index 0474221..11a652e 100644 --- a/examples/weekly-digest/agent.ts +++ b/examples/weekly-digest/agent.ts @@ -57,10 +57,13 @@ export default handler(async (ctx, event) => { } const body = renderDigest({ week: isoWeek, fetchedAt, topics, clusters }); - const [owner, repo] = config.repo.split('/'); - if (!owner || !repo) { - throw new Error(`weekly-digest: WEEKLY_DIGEST_REPO must be "owner/repo"; got "${config.repo}"`); + const repoSegments = config.repo.split('/'); + if (repoSegments.length !== 2 || !repoSegments[0].trim() || !repoSegments[1].trim()) { + throw new Error( + `weekly-digest: WEEKLY_DIGEST_REPO must be exactly "owner/repo"; got "${config.repo}"` + ); } + const [owner, repo] = repoSegments as [string, string]; const result = await github.upsertIssue({ owner, diff --git a/examples/weekly-digest/persona.json b/examples/weekly-digest/persona.json index 8ec35c5..765a739 100644 --- a/examples/weekly-digest/persona.json +++ b/examples/weekly-digest/persona.json @@ -5,9 +5,7 @@ "description": "Weekly competitive-intel digest. Searches the web for mentions of configured topics, dedupes and clusters by source domain, and upserts a single GitHub issue per ISO week.", "cloud": true, "integrations": { - "github": { - "scope": { "repo": "AgentWorkforce/weekly-digest" } - } + "github": {} }, "schedules": [ { "name": "weekly", "cron": "0 9 * * 6", "tz": "UTC" } diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index ef9cd68..a3ab88d 100644 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -3819,10 +3819,12 @@ export async function main(): Promise { if (subcommand === 'deploy') { await runDeploy(rest); + return; } if (subcommand === 'login') { await runLogin(rest); + return; } if (subcommand !== 'agent') { diff --git a/packages/cli/src/deploy-command.ts b/packages/cli/src/deploy-command.ts index ba2dcee..e9e37e9 100644 --- a/packages/cli/src/deploy-command.ts +++ b/packages/cli/src/deploy-command.ts @@ -161,6 +161,12 @@ function expectValue(flag: string, value: string | undefined): string { if (typeof value !== 'string' || !value.trim()) { die(`${flag}: missing value`); } + // Reject the next token if it looks like a flag — `--workspace --detach` + // should fail loudly rather than silently treating `--detach` as the + // workspace name. + if (value.startsWith('-')) { + die(`${flag}: missing value (got "${value}", which looks like a flag)`); + } return value; } diff --git a/packages/deploy/src/connect.ts b/packages/deploy/src/connect.ts index 9998b66..693a5e8 100644 --- a/packages/deploy/src/connect.ts +++ b/packages/deploy/src/connect.ts @@ -157,6 +157,9 @@ export async function connectIntegrations(input: ConnectAllInput): Promise { + const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); + try { + return await rl.question(question); + } finally { + rl.close(); } - return rl; } return { @@ -28,13 +32,13 @@ export function createTerminalIO(): DeployIO { }, async prompt(question, opts = {}) { const suffix = opts.defaultValue !== undefined ? ` [${opts.defaultValue}]` : ''; - const answer = (await ensureReadline().question(`${question}${suffix} `)).trim(); + const answer = (await ask(`${question}${suffix} `)).trim(); return answer.length > 0 ? answer : opts.defaultValue ?? ''; }, async confirm(question, opts = {}) { const def = opts.defaultValue ?? false; const suffix = def ? ' [Y/n]' : ' [y/N]'; - const answer = (await ensureReadline().question(`${question}${suffix} `)).trim().toLowerCase(); + const answer = (await ask(`${question}${suffix} `)).trim().toLowerCase(); if (answer === '') return def; return answer === 'y' || answer === 'yes'; } diff --git a/packages/deploy/src/login.ts b/packages/deploy/src/login.ts index 71d4e66..4756310 100644 --- a/packages/deploy/src/login.ts +++ b/packages/deploy/src/login.ts @@ -22,8 +22,12 @@ export interface WorkspaceAuth { export function envWorkspaceAuth(): WorkspaceAuth { return { async resolveWorkspace({ override, io }) { - const workspace = override ?? process.env.WORKFORCE_WORKSPACE_ID; - const token = process.env.WORKFORCE_WORKSPACE_TOKEN; + // Normalize whitespace-only values to "missing" — a token of `" "` + // is no more usable than an empty string, and silently passing one + // through produces a confusing 401 later instead of a clear setup + // error here. + const workspace = (override ?? process.env.WORKFORCE_WORKSPACE_ID ?? '').trim(); + const token = (process.env.WORKFORCE_WORKSPACE_TOKEN ?? '').trim(); if (!workspace) { io.error( 'no workspace resolved: pass --workspace, set WORKFORCE_WORKSPACE_ID, or run `workforce login`' diff --git a/packages/deploy/src/modes/dev.ts b/packages/deploy/src/modes/dev.ts index 0a1b2ce..8282a39 100644 --- a/packages/deploy/src/modes/dev.ts +++ b/packages/deploy/src/modes/dev.ts @@ -83,8 +83,14 @@ export const devLauncher: ModeLauncher = { stopping = true; child.stdin?.end(); child.kill('SIGTERM'); + // `child.killed` flips true the moment `kill()` delivers the signal, + // regardless of whether the child has actually exited. To detect a + // stuck child we have to look at the real lifecycle markers — both + // exitCode and signalCode stay null until the OS reaps the process. const escalation = setTimeout(() => { - if (!child.killed) child.kill('SIGKILL'); + if (child.exitCode === null && child.signalCode === null) { + child.kill('SIGKILL'); + } }, SIGTERM_TO_SIGKILL_MS).unref(); try { await done; diff --git a/packages/deploy/src/modes/sandbox.ts b/packages/deploy/src/modes/sandbox.ts index 02a5658..f6aebab 100644 --- a/packages/deploy/src/modes/sandbox.ts +++ b/packages/deploy/src/modes/sandbox.ts @@ -144,12 +144,12 @@ async function uploadBundle(sandbox: Sandbox, input: ModeLaunchInput): Promise { assert.throws(() => parseOnEvent('', 'onEvent'), /must be a non-empty string/); }); +test('parseOnEvent rejects Windows-absolute path shapes too', () => { + assert.throws( + () => parseOnEvent('C:\\handlers\\agent.ts', 'onEvent'), + /must be a relative POSIX path/ + ); + assert.throws( + () => parseOnEvent('C:/handlers/agent.ts', 'onEvent'), + /must be a relative POSIX path/ + ); + assert.throws( + () => parseOnEvent('\\\\server\\share\\agent.ts', 'onEvent'), + /must be a relative POSIX path/ + ); +}); + +test('parseSchedules trims name/cron/tz before validation and dedupe', () => { + // Whitespace variants of the same name should collapse so dedupe works. + assert.throws( + () => + parseSchedules( + [ + { name: 'weekly', cron: '0 9 * * 6' }, + { name: ' weekly ', cron: '0 10 * * 6' } + ], + 'schedules' + ), + /duplicates an earlier schedule/ + ); + const s = parseSchedules( + [{ name: ' morning ', cron: ' 0 9 * * 1-5 ', tz: ' America/New_York ' }], + 'schedules' + ); + assert.deepEqual(s, [ + { name: 'morning', cron: '0 9 * * 1-5', tz: 'America/New_York' } + ]); +}); + test('parsePersonaSpec rejects non-boolean cloud / useSubscription', () => { assert.throws( () => parsePersonaSpec(validSpec({ cloud: 'yes' }), 'documentation'), diff --git a/packages/persona-kit/src/parse.ts b/packages/persona-kit/src/parse.ts index 99cda84..3425842 100644 --- a/packages/persona-kit/src/parse.ts +++ b/packages/persona-kit/src/parse.ts @@ -480,11 +480,18 @@ const ONEVENT_EXT_RE = /\.(?:ts|tsx|mts|cts|js|mjs|cjs)$/i; // runtime errors rather than silently passing through here. const CRON_FIELD_RE = /^(?:\*|(?:\d+(?:-\d+)?)(?:,\d+(?:-\d+)?)*)(?:\/\d+)?$/; +// Windows absolute path shapes the parser must reject in addition to +// the POSIX "/abs" form. Drive-letter (`C:\…`, `C:/…`) and UNC +// (`\\server\share`, `//server/share`) forms both qualify as absolute +// for our purposes — personas need to stay portable, so onEvent paths +// must be relative to the persona JSON's directory. +const WIN_ABSOLUTE_RE = /^(?:[A-Za-z]:[\\/]|[\\/]{2})/; + function assertOnEventPath(value: unknown, context: string): string { if (typeof value !== 'string' || !value.trim()) { throw new Error(`${context} must be a non-empty string`); } - if (value.startsWith('/')) { + if (value.startsWith('/') || WIN_ABSOLUTE_RE.test(value)) { throw new Error(`${context} must be a relative POSIX path; got absolute "${value}"`); } const segments = value.split(/[\\/]+/); @@ -601,6 +608,9 @@ export function parseSchedules( } if (value.length === 0) return undefined; + // Trim every field before validating + deduping so stray whitespace + // does not leak into schedule ids (used as `event.name`) or bypass + // the duplicate-name guard (e.g. `"weekly"` vs `"weekly "`). const seenNames = new Set(); const out: PersonaSchedule[] = []; for (const [idx, entry] of value.entries()) { @@ -612,21 +622,23 @@ export function parseSchedules( if (typeof name !== 'string' || !name.trim()) { throw new Error(`${entryContext}.name must be a non-empty string`); } - if (seenNames.has(name)) { - throw new Error(`${entryContext}.name "${name}" duplicates an earlier schedule`); + const trimmedName = name.trim(); + if (seenNames.has(trimmedName)) { + throw new Error(`${entryContext}.name "${trimmedName}" duplicates an earlier schedule`); } - seenNames.add(name); + seenNames.add(trimmedName); if (typeof cron !== 'string' || !cron.trim()) { throw new Error(`${entryContext}.cron must be a non-empty string`); } - assertCronExpression(cron, `${entryContext}.cron`); + const trimmedCron = cron.trim(); + assertCronExpression(trimmedCron, `${entryContext}.cron`); if (tz !== undefined && (typeof tz !== 'string' || !tz.trim())) { throw new Error(`${entryContext}.tz must be a non-empty string if provided`); } out.push({ - name, - cron, - ...(typeof tz === 'string' ? { tz } : {}) + name: trimmedName, + cron: trimmedCron, + ...(typeof tz === 'string' ? { tz: tz.trim() } : {}) }); } return out; diff --git a/packages/persona-kit/src/triggers.ts b/packages/persona-kit/src/triggers.ts index 29b3283..3d221e8 100644 --- a/packages/persona-kit/src/triggers.ts +++ b/packages/persona-kit/src/triggers.ts @@ -7,10 +7,9 @@ import type { PersonaSpec } from './types.js'; * not a failure, so adding a new event upstream doesn't gate workforce * releases. * - * This file ships intentionally sparse in v1 (see - * `docs/plans/deploy-v1-codex-spec.md` Task 6). Codex fills it out from the - * Relayfile adapter sources at `/Users/khaliqgant/Projects/AgentWorkforce/ - * relayfile-adapters/` plus the per-provider docs. + * This file ships intentionally sparse in v1; the full per-provider list + * lands when the Relayfile adapter packages publish their normalized + * event catalogs. */ export const KNOWN_TRIGGERS = { github: [ diff --git a/packages/runtime/src/clients/github.test.ts b/packages/runtime/src/clients/github.test.ts index dd7d5db..75b9904 100644 --- a/packages/runtime/src/clients/github.test.ts +++ b/packages/runtime/src/clients/github.test.ts @@ -127,6 +127,40 @@ test('createGithubClient surfaces non-2xx with WorkforceIntegrationError', async ); }); +test('createGithubClient.getPr fetches the diff through the canonical API endpoint (not pr.diff_url)', async () => { + const { fetch: fakeImpl, calls } = fakeFetch([ + () => + new Response( + JSON.stringify({ + title: 't', + body: 'b', + head: { ref: 'feature' }, + base: { ref: 'main' }, + user: { login: 'alice' }, + // Untrusted hint the client must ignore. + diff_url: 'https://attacker.example.com/leaked.diff' + }), + { status: 200 } + ), + () => + new Response('diff --git a/x b/x\n', { + status: 200, + headers: { 'content-type': 'application/vnd.github.v3.diff' } + }) + ]); + const client = createGithubClient({ token: 'pat_secret', fetchImpl: fakeImpl }); + const pr = await client.getPr({ owner: 'o', repo: 'r', number: 5 }); + assert.match(pr.diff, /^diff --git/); + // Both requests target api.github.com, never the untrusted diff_url host. + for (const call of calls) { + assert.match(call.url, /^https:\/\/api\.github\.com\//); + assert.ok(!call.url.includes('attacker.example.com')); + assert.equal(call.headers.authorization, 'Bearer pat_secret'); + } + // Diff call uses the diff accept header. + assert.equal(calls[1].headers.accept, 'application/vnd.github.v3.diff'); +}); + test('createGithubClient surfaces 4xx as non-retryable', async () => { const { fetch: fakeImpl } = fakeFetch([ () => new Response('not found', { status: 404, statusText: 'Not Found' }) diff --git a/packages/runtime/src/clients/github.ts b/packages/runtime/src/clients/github.ts index e5d4748..2f22f88 100644 --- a/packages/runtime/src/clients/github.ts +++ b/packages/runtime/src/clients/github.ts @@ -72,14 +72,17 @@ export function createGithubClient(opts: GithubClientOptions): GithubClient { const apiUrl = (opts.apiUrl ?? 'https://api.github.com').replace(/\/$/, ''); const fetchImpl = opts.fetchImpl ?? fetch; - async function request(operation: string, init: { method: string; pathname: string; body?: unknown }): Promise { + async function request( + operation: string, + init: { method: string; pathname: string; body?: unknown; accept?: string; responseType?: 'json' | 'text' } + ): Promise { const url = `${apiUrl}${init.pathname}`; let response: Response; try { response = await fetchImpl(url, { method: init.method, headers: { - accept: 'application/vnd.github+json', + accept: init.accept ?? 'application/vnd.github+json', authorization: `Bearer ${opts.token}`, 'x-github-api-version': '2022-11-28', ...(init.body !== undefined ? { 'content-type': 'application/json' } : {}), @@ -109,6 +112,7 @@ export function createGithubClient(opts: GithubClientOptions): GithubClient { } if (response.status === 204) return undefined as T; + if (init.responseType === 'text') return (await response.text()) as unknown as T; return (await response.json()) as T; } @@ -174,12 +178,20 @@ export function createGithubClient(opts: GithubClientOptions): GithubClient { head: { ref: string }; base: { ref: string }; user: { login: string } | null; - diff_url: string; }>('getPr.metadata', { method: 'GET', pathname: `/repos/${target.owner}/${target.repo}/pulls/${target.number}` }); - const diff = await fetchDiff(fetchImpl, opts.token, pr.diff_url); + // Fetch the diff through the canonical API endpoint with the same + // configured host + auth pipeline, not whatever URL the previous + // response handed us. Using `request` keeps the bearer token scoped + // to `apiUrl` and reuses the WorkforceIntegrationError mapping. + const diff = await request('getPr.diff', { + method: 'GET', + pathname: `/repos/${target.owner}/${target.repo}/pulls/${target.number}`, + accept: 'application/vnd.github.v3.diff', + responseType: 'text' + }); return { title: pr.title, body: pr.body ?? '', @@ -207,26 +219,6 @@ export function createGithubClient(opts: GithubClientOptions): GithubClient { }; } -async function fetchDiff(fetchImpl: typeof fetch, token: string, diffUrl: string): Promise { - const response = await fetchImpl(diffUrl, { - headers: { - accept: 'application/vnd.github.v3.diff', - authorization: `Bearer ${token}`, - 'user-agent': 'workforce-runtime' - } - }); - if (!response.ok) { - throw new WorkforceIntegrationError({ - provider: 'github', - operation: 'getPr.diff', - message: `${response.status} ${response.statusText}`, - status: response.status, - retryable: isRetryableStatus(response.status) - }); - } - return response.text(); -} - function truncate(s: string, n: number): string { return s.length <= n ? s : `${s.slice(0, n)}…`; } diff --git a/packages/runtime/src/ctx.ts b/packages/runtime/src/ctx.ts index 1453751..3b06761 100644 --- a/packages/runtime/src/ctx.ts +++ b/packages/runtime/src/ctx.ts @@ -109,11 +109,34 @@ export function buildCtx(options: CtxBuildOptions): WorkforceCtx { // `GithubClient`, others are `unknown` until they ship. Handlers // narrow with a runtime check (`if (ctx.linear)`) and cast against // the future client interface. + // + // Reserved fields are guarded so a malformed persona that declares + // an integration named `harness` or `sandbox` cannot clobber core + // ctx subsystems — that would silently turn `ctx.harness.run(...)` + // into a call against an attacker-controlled object. if (options.integrations) { for (const [provider, client] of Object.entries(options.integrations)) { + if (CORE_CTX_FIELDS.has(provider)) { + throw new Error( + `runtime: integration provider "${provider}" collides with a core ctx field; rename the integration in your persona JSON` + ); + } Object.assign(ctx, { [provider]: client }); } } return ctx; } + +const CORE_CTX_FIELDS: ReadonlySet = new Set([ + 'persona', + 'workspaceId', + 'agentName', + 'llm', + 'harness', + 'sandbox', + 'memory', + 'workflow', + 'schedule', + 'log' +]); diff --git a/packages/runtime/src/runner.test.ts b/packages/runtime/src/runner.test.ts index e382177..0a867df 100644 --- a/packages/runtime/src/runner.test.ts +++ b/packages/runtime/src/runner.test.ts @@ -118,6 +118,21 @@ test('startRunner skips envelopes that the shim can not translate', async () => assert.ok(logs.find((l) => l.message === 'runner.envelope.unsupported')); }); +test('buildCtx rejects integrations that collide with core fields', async () => { + const { buildCtx } = await import('./ctx.js'); + assert.throws( + () => + buildCtx({ + persona, + workspaceId: 'ws', + sandbox: stubSandbox, + harnessRunner: async () => ({ output: '', exitCode: 0, durationMs: 0 }), + integrations: { harness: { evil: true } } + }), + /collides with a core ctx field/ + ); +}); + test('startRunner throws when workspaceId is missing from both options and env', async () => { const previous = process.env.WORKFORCE_WORKSPACE_ID; delete process.env.WORKFORCE_WORKSPACE_ID; diff --git a/packages/runtime/src/runner.ts b/packages/runtime/src/runner.ts index ae57936..757be19 100644 --- a/packages/runtime/src/runner.ts +++ b/packages/runtime/src/runner.ts @@ -197,13 +197,20 @@ async function* readEnvelopesFromStdin(): AsyncGenerator { } } - // Drain any trailing line that lacked a terminating newline. + // Drain any trailing line that lacked a terminating newline. Log + // parse failures with the same warning shape the per-line path uses, + // so a stuck producer doesn't silently swallow envelopes. const tail = buffer.trim(); if (tail.length > 0) { try { yield JSON.parse(tail) as RawGatewayEnvelope; - } catch { - /* ignore */ + } catch (err) { + const excerpt = tail.length > 200 ? `${tail.slice(0, 200)}…` : tail; + process.stderr.write( + `[workforce-runtime] failed to parse trailing envelope line: ${ + err instanceof Error ? err.message : String(err) + } — excerpt: ${excerpt}\n` + ); } } } diff --git a/packages/runtime/src/shim.test.ts b/packages/runtime/src/shim.test.ts index 575fd40..9c53fd1 100644 --- a/packages/runtime/src/shim.test.ts +++ b/packages/runtime/src/shim.test.ts @@ -67,6 +67,13 @@ test('shimEnvelope returns null for unknown sources and malformed envelopes', () assert.equal(shimEnvelope({ id: 'e', workspace: 'w', type: '', occurredAt: 'x' }), null); }); +test('shimEnvelope returns null when provider event has no event-name suffix', () => { + assert.equal( + shimEnvelope({ id: 'e', workspace: 'w', type: 'github.', occurredAt: 'x' }), + null + ); +}); + test('handler() brands a function and round-trips identity', () => { let called = false; const fn = handler(async () => { diff --git a/packages/runtime/src/shim.ts b/packages/runtime/src/shim.ts index 631cec9..c854b7a 100644 --- a/packages/runtime/src/shim.ts +++ b/packages/runtime/src/shim.ts @@ -79,6 +79,12 @@ export function shimEnvelope(env: RawGatewayEnvelope): WorkforceEvent | null { if (firstDot <= 0) return null; const providerCandidate = env.type.slice(0, firstDot); if (!isProviderSource(providerCandidate)) return null; + const eventType = env.type.slice(firstDot + 1); + // Guard against envelopes like `github.` where the source is valid but + // the event-name suffix is missing. The runtime should not dispatch an + // empty `event.type` to handlers — better to drop the envelope and let + // it surface in the unsupported-envelope log. + if (!eventType) return null; const providerEvent: WorkforceProviderEvent = { source: providerCandidate, @@ -86,7 +92,7 @@ export function shimEnvelope(env: RawGatewayEnvelope): WorkforceEvent | null { occurredAt, attempt, workspaceId: env.workspace, - type: env.type.slice(firstDot + 1), + type: eventType, payload: env.resource ?? null, ...(env.summary ? { summary: env.summary } : {}) }; From 83f9ca09f01b0d47c94acd3c66313541a29d83dc Mon Sep 17 00:00:00 2001 From: Khaliq Date: Tue, 12 May 2026 12:56:34 +0200 Subject: [PATCH 3/4] docs(deploy-v1): add markdownlint language tags to fenced code blocks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit CodeRabbit MD040 nit on #90 — the four unlabeled code-fence opens in docs/plans/deploy-v1.md and the three in deploy-v1-workflow-spec.md are now labeled (sh / text). Pure cosmetic; no content changes. Co-Authored-By: Claude Opus 4.7 (1M context) --- docs/plans/deploy-v1-workflow-spec.md | 6 +++--- docs/plans/deploy-v1.md | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/plans/deploy-v1-workflow-spec.md b/docs/plans/deploy-v1-workflow-spec.md index 09f26ac..e0dda01 100644 --- a/docs/plans/deploy-v1-workflow-spec.md +++ b/docs/plans/deploy-v1-workflow-spec.md @@ -25,7 +25,7 @@ npx tsx workflows/workforce-deploy-v1.ts Resolve these as env vars with the defaults shown. Ricky should set these to absolute paths inside its sandbox. -``` +```sh HOME=/Users/khaliqgant ROOT=$HOME/Projects/AgentWorkforce @@ -38,7 +38,7 @@ RELAY_REPO=$ROOT/relay # read-only reference ### Required secrets -``` +```sh DAYTONA_API_KEY # for cloud-side endpoint smoke test GITHUB_TOKEN # for opening PRs WORKFORCE_E2E_STAGING_TOKEN # set in CI for the E2E job; not needed for the workflow itself @@ -69,7 +69,7 @@ Hub-spoke / Conversation. A lead Claude Opus stays on `#wf-workforce-deploy-v1` - `$CLOUD_REPO` is used by Track A only → no worktree needed; operate in place on the branch. - `$WORKFORCE_REPO` is shared by Tracks B, C, and INT → **each track operates in its own git worktree**. Path conventions: - ``` + ```text $WORKFORCE_REPO # Track B (consume Daytona) → branch feat/deploy-v1-daytona-consume $WORKFORCE_REPO.wt-mcp # Track C (MCP server) → branch feat/mcp-workforce $WORKFORCE_REPO.wt-e2e # Track INT (E2E test) → branch feat/deploy-v1-e2e diff --git a/docs/plans/deploy-v1.md b/docs/plans/deploy-v1.md index 1da14e0..a11ab43 100644 --- a/docs/plans/deploy-v1.md +++ b/docs/plans/deploy-v1.md @@ -13,7 +13,7 @@ A workforce **persona is a deployable agent**. Today a persona JSON describes ho The user-facing command is one line: -``` +```sh workforce deploy ./review-agent.json ``` @@ -274,7 +274,7 @@ case 'login': await runLogin(argv); break; // small new helper for cloud aut `runDeploy` is a ~150-line orchestrator over the deploy package. Public flags: -``` +```sh workforce deploy [--mode dev|sandbox|cloud] # default: sandbox if Daytona creds present, else dev [--workspace ] # workforce workspace; defaults to active @@ -403,7 +403,7 @@ Direct port of the proactive-agents weekly-digest pattern. ## 8. Package layout — diff -``` +```text workforce/ ├── packages/ │ ├── cli/ # add `deploy`, `login` cases @@ -606,7 +606,7 @@ A user with: Can run: -``` +```sh workforce login workforce deploy ./examples/weekly-digest/persona.json --mode dev ``` From f4f07c4f5dfba34378f4a2dede6c16adfc5947b0 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Tue, 12 May 2026 13:23:04 +0200 Subject: [PATCH 4/4] feat(deploy): wire workforce-managed sandbox path through cloud proxy MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Cloud#543 merged the POST /api/v1/workspaces/:id/sandboxes endpoint (plus per-sandbox /exec and /files proxy routes). This commit finishes the workforce-side half: the sandbox launcher now drives either path automatically. Mode picking (resolveSandboxClient): - BYO: DAYTONA_API_KEY (or JWT + ORGANIZATION_ID) in env → talk to Daytona directly via @daytonaio/sdk. Zero workforce-cloud round-trips. - Workforce-managed: BYO env absent + WORKFORCE_WORKSPACE_TOKEN present → POST /sandboxes to mint a proxy handle, then route exec/upload through the returned execUrl + filesUrl. Cloud holds the org Daytona credentials so users never see them. - --byo-sandbox CLI flag forces BYO even when both are configured. - Neither configured: clean error pointing at the two setup paths. Implementation (sandbox-client.ts): - SandboxClient interface: mint, uploadBundle, exec, destroy. - createByoSandboxClient: extracted from the old sandbox.ts. - createProxySandboxClient: HTTP against cloud endpoints with the workspace token. Absolutifies returned execUrl/filesUrl against WORKFORCE_CLOUD_URL when cloud emits relative paths. Tolerates 404 on destroy (idempotent cleanup). npm install in the sandbox runs through the same exec channel so behavior matches BYO. Wiring: - DeployOptions.byoSandbox → ModeLaunchInput.byoSandbox → launcher overrides.forceByo. The flag was already parsed in the CLI; it just wasn't threaded through. Tests (+9): - sandbox-client.test.ts: proxy mint → upload → exec → destroy round trip, 4xx surface, idempotent 404 destroy, in-sandbox npm install failure surfaces. - sandbox.test.ts: resolveSandboxClient mode selection across env permutations (BYO present, workforce-only, neither, forceByo + BYO, forceByo without BYO). Repo gates: 375 tests pass; typecheck + examples typecheck clean. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/deploy/src/deploy.ts | 3 +- .../deploy/src/modes/sandbox-client.test.ts | 209 +++++++++++++ packages/deploy/src/modes/sandbox-client.ts | 294 ++++++++++++++++++ packages/deploy/src/modes/sandbox.test.ts | 131 ++++++++ packages/deploy/src/modes/sandbox.ts | 227 +++++++------- packages/deploy/src/types.ts | 5 + 6 files changed, 762 insertions(+), 107 deletions(-) create mode 100644 packages/deploy/src/modes/sandbox-client.test.ts create mode 100644 packages/deploy/src/modes/sandbox-client.ts create mode 100644 packages/deploy/src/modes/sandbox.test.ts diff --git a/packages/deploy/src/deploy.ts b/packages/deploy/src/deploy.ts index 714dc19..198bf09 100644 --- a/packages/deploy/src/deploy.ts +++ b/packages/deploy/src/deploy.ts @@ -179,7 +179,8 @@ export async function deploy(opts: DeployOptions, resolvers: DeployResolvers = { bundle, workspace, io, - ...(opts.detach ? { detach: true } : {}) + ...(opts.detach ? { detach: true } : {}), + ...(opts.byoSandbox ? { byoSandbox: true } : {}) }); io.info(`launched: ${mode}/${handle.id}`); diff --git a/packages/deploy/src/modes/sandbox-client.test.ts b/packages/deploy/src/modes/sandbox-client.test.ts new file mode 100644 index 0000000..dc10a87 --- /dev/null +++ b/packages/deploy/src/modes/sandbox-client.test.ts @@ -0,0 +1,209 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import os from 'node:os'; +import { createProxySandboxClient, SANDBOX_BUNDLE_DIR } from './sandbox-client.js'; +import type { BundleResult } from '../types.js'; + +interface RecordedCall { + url: string; + method: string; + headers: Record; + body?: unknown; +} + +function fakeFetch( + handlers: Array<(call: RecordedCall) => Response | Promise> +): { fetch: typeof fetch; calls: RecordedCall[] } { + const calls: RecordedCall[] = []; + let i = 0; + const impl = (async (input: RequestInfo | URL, init?: RequestInit) => { + const url = typeof input === 'string' ? input : input.toString(); + const headers: Record = {}; + if (init?.headers) { + const entries = + init.headers instanceof Headers + ? Array.from(init.headers.entries()) + : Array.isArray(init.headers) + ? init.headers + : Object.entries(init.headers); + for (const [k, v] of entries) headers[k.toLowerCase()] = String(v); + } + const body = init?.body ? JSON.parse(init.body.toString()) : undefined; + const call: RecordedCall = { + url, + method: init?.method ?? 'GET', + headers, + ...(body !== undefined ? { body } : {}) + }; + calls.push(call); + const handler = handlers[i]; + if (!handler) throw new Error(`fakeFetch: no handler at call index ${i}`); + i += 1; + return handler(call); + }) as typeof fetch; + return { fetch: impl, calls }; +} + +async function fixtureBundle(dir: string): Promise { + const runnerPath = path.join(dir, 'runner.mjs'); + const bundlePath = path.join(dir, 'agent.bundle.mjs'); + const personaCopyPath = path.join(dir, 'persona.json'); + const packageJsonPath = path.join(dir, 'package.json'); + await Promise.all([ + writeFile(runnerPath, 'runner', 'utf8'), + writeFile(bundlePath, 'bundle', 'utf8'), + writeFile(personaCopyPath, '{"id":"demo"}', 'utf8'), + writeFile(packageJsonPath, '{}', 'utf8') + ]); + return { runnerPath, bundlePath, personaCopyPath, packageJsonPath, sizeBytes: 13 }; +} + +test('proxy client mints, uploads, execs, and destroys against cloud sandboxes endpoint', async () => { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-sandbox-')); + try { + const bundle = await fixtureBundle(dir); + const { fetch: impl, calls } = fakeFetch([ + // POST /sandboxes + () => + new Response( + JSON.stringify({ + sandboxId: 'sbx_test', + authMode: 'proxy', + execUrl: 'https://cloud.example.com/api/v1/workspaces/ws/sandboxes/sbx_test/exec', + filesUrl: 'https://cloud.example.com/api/v1/workspaces/ws/sandboxes/sbx_test/files' + }), + { status: 201 } + ), + // PUT /files + () => new Response(null, { status: 204 }), + // POST /exec (npm install) + () => new Response(JSON.stringify({ exitCode: 0, output: 'added 1 package' }), { status: 200 }), + // POST /exec (node runner.mjs) + () => new Response(JSON.stringify({ exitCode: 0, output: 'runner ok' }), { status: 200 }), + // DELETE /sandboxes/:id + () => new Response(null, { status: 204 }) + ]); + + const client = createProxySandboxClient({ + cloudUrl: 'https://cloud.example.com', + workspaceId: 'ws', + workspaceToken: 'tok-secret', + personaId: 'demo', + fetchImpl: impl + }); + + const handle = await client.mint({ + label: 'wf-demo', + env: { WORKFORCE_WORKSPACE_ID: 'ws' } + }); + assert.equal(handle.mode, 'proxy'); + assert.equal(handle.sandboxId, 'sbx_test'); + assert.equal(handle.id, 'proxy:sbx_test'); + + await client.uploadBundle(handle, bundle); + const runResult = await client.exec(handle, 'node runner.mjs', { + cwd: SANDBOX_BUNDLE_DIR, + timeoutSeconds: 60 + }); + assert.equal(runResult.exitCode, 0); + assert.equal(runResult.output, 'runner ok'); + + await client.destroy(handle); + + // Mint request shape. + assert.equal(calls[0].url, 'https://cloud.example.com/api/v1/workspaces/ws/sandboxes'); + assert.equal(calls[0].method, 'POST'); + assert.equal(calls[0].headers.authorization, 'Bearer tok-secret'); + assert.equal((calls[0].body as { purpose: string }).purpose, 'workforce-deploy'); + assert.equal((calls[0].body as { personaId: string }).personaId, 'demo'); + + // Upload PUT carries base64 file entries. + assert.equal(calls[1].method, 'PUT'); + assert.match(calls[1].url, /\/sandboxes\/sbx_test\/files$/); + const uploadBody = calls[1].body as { entries: Array<{ source: string; destination: string }> }; + assert.equal(uploadBody.entries.length, 4); + const runnerEntry = uploadBody.entries.find((e) => e.destination.endsWith('/runner.mjs')); + assert.ok(runnerEntry); + assert.equal(Buffer.from(runnerEntry!.source, 'base64').toString('utf8'), 'runner'); + + // Install exec. + assert.equal(calls[2].method, 'POST'); + assert.match(calls[2].url, /\/sandboxes\/sbx_test\/exec$/); + assert.match((calls[2].body as { command: string }).command, /^npm install/); + + // Runner exec. + assert.equal((calls[3].body as { command: string }).command, 'node runner.mjs'); + + // Delete by sandbox id. + assert.equal(calls[4].method, 'DELETE'); + assert.match(calls[4].url, /\/sandboxes\/sbx_test$/); + } finally { + await rm(dir, { recursive: true, force: true }); + } +}); + +test('proxy client surfaces non-2xx mint responses with the cloud status + excerpt', async () => { + const { fetch: impl } = fakeFetch([ + () => new Response('quota exceeded', { status: 429, statusText: 'Too Many Requests' }) + ]); + const client = createProxySandboxClient({ + cloudUrl: 'https://cloud.example.com', + workspaceId: 'ws', + workspaceToken: 'tok', + personaId: 'demo', + fetchImpl: impl + }); + await assert.rejects( + () => client.mint({ label: 'wf-demo' }), + /sandbox\(proxy\)\.mint: 429 Too Many Requests/ + ); +}); + +test('proxy client tolerates 404 on destroy (already deleted)', async () => { + const { fetch: impl } = fakeFetch([ + () => new Response('not found', { status: 404, statusText: 'Not Found' }) + ]); + const client = createProxySandboxClient({ + cloudUrl: 'https://cloud.example.com', + workspaceId: 'ws', + workspaceToken: 'tok', + personaId: 'demo', + fetchImpl: impl + }); + // Construct a minimal proxy handle by hand (we don't want to mint first). + await client.destroy({ id: 'proxy:x', sandboxId: 'x', mode: 'proxy' }); +}); + +test('proxy client throws when npm install in the sandbox fails', async () => { + const dir = await mkdtemp(path.join(os.tmpdir(), 'wf-sandbox-')); + try { + const bundle = await fixtureBundle(dir); + const { fetch: impl } = fakeFetch([ + () => + new Response( + JSON.stringify({ + sandboxId: 'sbx', + authMode: 'proxy', + execUrl: 'https://cloud.example.com/api/v1/workspaces/ws/sandboxes/sbx/exec', + filesUrl: 'https://cloud.example.com/api/v1/workspaces/ws/sandboxes/sbx/files' + }), + { status: 201 } + ), + () => new Response(null, { status: 204 }), + () => new Response(JSON.stringify({ exitCode: 1, output: 'EACCES' }), { status: 200 }) + ]); + const client = createProxySandboxClient({ + cloudUrl: 'https://cloud.example.com', + workspaceId: 'ws', + workspaceToken: 'tok', + personaId: 'demo', + fetchImpl: impl + }); + const handle = await client.mint({ label: 'wf' }); + await assert.rejects(() => client.uploadBundle(handle, bundle), /npm install failed \(exit 1\)/); + } finally { + await rm(dir, { recursive: true, force: true }); + } +}); diff --git a/packages/deploy/src/modes/sandbox-client.ts b/packages/deploy/src/modes/sandbox-client.ts new file mode 100644 index 0000000..d5d5cea --- /dev/null +++ b/packages/deploy/src/modes/sandbox-client.ts @@ -0,0 +1,294 @@ +import { readFile } from 'node:fs/promises'; +import { Daytona, type Sandbox as DaytonaSandbox } from '@daytonaio/sdk'; +import type { BundleResult } from '../types.js'; + +/** + * Working directory the runner is invoked from inside the sandbox. Same + * mount root cloud's DaytonaRuntime defaults to, so persona authors get + * consistent paths whether they run under workforce or cloud workflows. + */ +export const SANDBOX_BUNDLE_DIR = '/home/daytona/bundle'; + +export interface SandboxExecResult { + exitCode: number; + output: string; +} + +/** + * Abstraction over the two paths workforce uses to drive a sandbox: + * - BYO — the user's `DAYTONA_API_KEY` is in env; we talk to Daytona + * directly via the SDK. + * - Proxy — workforce cloud holds the org Daytona credentials; we + * POST `/workspaces/:id/sandboxes` to mint a sandbox and then + * send `exec` and `files` calls through cloud's per-sandbox + * proxy URLs. + * + * Both implementations satisfy the same interface so the launcher stays + * mode-agnostic. `mint()` is required up front; the rest of the methods + * operate on the returned handle. + */ +export interface SandboxClient { + mint(args: MintArgs): Promise; + uploadBundle(handle: SandboxHandle, bundle: BundleResult): Promise; + exec(handle: SandboxHandle, command: string, opts?: ExecOptions): Promise; + destroy(handle: SandboxHandle): Promise; +} + +export interface MintArgs { + label: string; + env?: Record; + /** Cap the create call itself; not the sandbox lifetime. */ + createTimeoutSeconds?: number; +} + +export interface ExecOptions { + cwd?: string; + env?: Record; + timeoutSeconds?: number; +} + +export interface SandboxHandle { + /** Mode-tagged id (`byo:` / `proxy:`). */ + id: string; + sandboxId: string; + mode: 'byo' | 'proxy'; +} + +// ─── BYO: direct Daytona SDK ──────────────────────────────────────────── + +export interface ByoSandboxClientOptions { + apiKey?: string; + jwtToken?: string; + organizationId?: string; +} + +interface ByoHandleInternal extends SandboxHandle { + mode: 'byo'; + sandbox: DaytonaSandbox; +} + +export function createByoSandboxClient(opts: ByoSandboxClientOptions): SandboxClient { + if (!opts.apiKey && !opts.jwtToken) { + throw new Error( + 'BYO sandbox client requires DAYTONA_API_KEY (or DAYTONA_JWT_TOKEN + DAYTONA_ORGANIZATION_ID) in env' + ); + } + const daytona = new Daytona(opts); + + return { + async mint(args) { + const sandbox = await daytona.create({ + language: 'typescript', + name: args.label, + ...(args.env ? { envVars: args.env } : {}) + }); + const handle: ByoHandleInternal = { + id: `byo:${sandbox.id}`, + sandboxId: sandbox.id, + mode: 'byo', + sandbox + }; + return handle; + }, + async uploadBundle(handle, bundle) { + const internal = handle as ByoHandleInternal; + const files = await readBundleFiles(bundle); + await internal.sandbox.fs.uploadFiles(files); + // Same offline-friendly npm install pattern as the proxy path so + // the bundle's runtime dep resolves consistently across modes. + const install = await internal.sandbox.process.executeCommand( + 'npm install --prefer-offline --no-audit --no-fund --loglevel=error', + SANDBOX_BUNDLE_DIR, + undefined, + 600 + ); + if ((install.exitCode ?? 0) !== 0) { + throw new Error( + `sandbox(byo): npm install failed (exit ${install.exitCode}): ${install.result?.slice(0, 400) ?? ''}` + ); + } + }, + async exec(handle, command, options) { + const internal = handle as ByoHandleInternal; + const result = await internal.sandbox.process.executeCommand( + command, + options?.cwd, + options?.env, + options?.timeoutSeconds + ); + return { + exitCode: result.exitCode ?? 0, + output: result.result ?? '' + }; + }, + async destroy(handle) { + const internal = handle as ByoHandleInternal; + await internal.sandbox.delete(); + } + }; +} + +// ─── Proxy: workforce cloud sandboxes endpoint ────────────────────────── + +export interface ProxySandboxClientOptions { + cloudUrl: string; + workspaceId: string; + workspaceToken: string; + personaId: string; + /** Defaults to global fetch; tests pass a stub. */ + fetchImpl?: typeof fetch; +} + +interface ProxyHandleInternal extends SandboxHandle { + mode: 'proxy'; + execUrl: string; + filesUrl: string; +} + +export function createProxySandboxClient(opts: ProxySandboxClientOptions): SandboxClient { + const fetchImpl = opts.fetchImpl ?? fetch; + const base = `${opts.cloudUrl.replace(/\/$/, '')}/api/v1/workspaces/${encodeURIComponent( + opts.workspaceId + )}`; + + function headers(extra: Record = {}): Record { + return { + authorization: `Bearer ${opts.workspaceToken}`, + 'user-agent': 'workforce-deploy', + ...extra + }; + } + + return { + async mint(args) { + const response = await fetchImpl(`${base}/sandboxes`, { + method: 'POST', + headers: headers({ 'content-type': 'application/json' }), + body: JSON.stringify({ + purpose: 'workforce-deploy', + personaId: opts.personaId, + label: args.label, + env: args.env, + // `timeoutSeconds` on the mint contract caps the *create call*, + // not the sandbox lifetime. Default to 120s which matches the + // cloud-side MAX_CREATE_TIMEOUT_SECONDS clamp. + timeoutSeconds: args.createTimeoutSeconds ?? 120 + }) + }); + if (!response.ok) { + throw await toError(response, 'sandbox(proxy).mint'); + } + const body = (await response.json()) as { + sandboxId: string; + authMode: 'proxy'; + execUrl: string; + filesUrl: string; + }; + if (!body?.sandboxId || !body.execUrl || !body.filesUrl) { + throw new Error( + `sandbox(proxy).mint: cloud response missing sandboxId/execUrl/filesUrl: ${JSON.stringify(body)}` + ); + } + const handle: ProxyHandleInternal = { + id: `proxy:${body.sandboxId}`, + sandboxId: body.sandboxId, + mode: 'proxy', + execUrl: absoluteUrl(opts.cloudUrl, body.execUrl), + filesUrl: absoluteUrl(opts.cloudUrl, body.filesUrl) + }; + return handle; + }, + async uploadBundle(handle, bundle) { + const internal = handle as ProxyHandleInternal; + const files = await readBundleFiles(bundle); + const response = await fetchImpl(internal.filesUrl, { + method: 'PUT', + headers: headers({ 'content-type': 'application/json' }), + body: JSON.stringify({ + entries: files.map((file) => ({ + source: file.source.toString('base64'), + destination: file.destination + })) + }) + }); + if (!response.ok) { + throw await toError(response, 'sandbox(proxy).uploadBundle'); + } + // Cloud's `files` endpoint only uploads — the install step still + // has to go through `exec` after the files land. + const install = await this.exec(handle, 'npm install --prefer-offline --no-audit --no-fund --loglevel=error', { + cwd: SANDBOX_BUNDLE_DIR, + timeoutSeconds: 600 + }); + if (install.exitCode !== 0) { + throw new Error( + `sandbox(proxy): npm install failed (exit ${install.exitCode}): ${install.output.slice(0, 400)}` + ); + } + }, + async exec(handle, command, options) { + const internal = handle as ProxyHandleInternal; + const response = await fetchImpl(internal.execUrl, { + method: 'POST', + headers: headers({ 'content-type': 'application/json' }), + body: JSON.stringify({ + command, + ...(options?.cwd ? { cwd: options.cwd } : {}), + ...(options?.env ? { env: options.env } : {}), + timeoutSeconds: options?.timeoutSeconds ?? 600 + }) + }); + if (!response.ok) { + throw await toError(response, 'sandbox(proxy).exec'); + } + const body = (await response.json()) as { exitCode: number; output: string }; + return { exitCode: body.exitCode ?? 0, output: body.output ?? '' }; + }, + async destroy(handle) { + const response = await fetchImpl( + `${base}/sandboxes/${encodeURIComponent(handle.sandboxId)}`, + { + method: 'DELETE', + headers: headers() + } + ); + if (!response.ok && response.status !== 404) { + throw await toError(response, 'sandbox(proxy).destroy'); + } + } + }; +} + +// ─── Shared helpers ───────────────────────────────────────────────────── + +async function readBundleFiles(bundle: BundleResult): Promise< + Array<{ source: Buffer; destination: string }> +> { + return Promise.all([ + fileUpload(bundle.runnerPath, `${SANDBOX_BUNDLE_DIR}/runner.mjs`), + fileUpload(bundle.bundlePath, `${SANDBOX_BUNDLE_DIR}/agent.bundle.mjs`), + fileUpload(bundle.personaCopyPath, `${SANDBOX_BUNDLE_DIR}/persona.json`), + fileUpload(bundle.packageJsonPath, `${SANDBOX_BUNDLE_DIR}/package.json`) + ]); +} + +async function fileUpload(localPath: string, remotePath: string): Promise<{ source: Buffer; destination: string }> { + const source = await readFile(localPath); + return { source, destination: remotePath }; +} + +function absoluteUrl(cloudBase: string, maybeRelative: string): string { + if (/^https?:\/\//.test(maybeRelative)) return maybeRelative; + // Cloud sometimes returns relative paths (`/api/v1/...`); attach the + // configured base so the workforce CLI can call them outside the + // cloud's own hostname. + return `${cloudBase.replace(/\/$/, '')}${maybeRelative.startsWith('/') ? '' : '/'}${maybeRelative}`; +} + +async function toError(response: Response, label: string): Promise { + const body = await response.text().catch(() => ''); + const excerpt = body.length > 400 ? `${body.slice(0, 400)}…` : body; + return new Error( + `${label}: ${response.status} ${response.statusText}${excerpt ? ` — ${excerpt}` : ''}` + ); +} diff --git a/packages/deploy/src/modes/sandbox.test.ts b/packages/deploy/src/modes/sandbox.test.ts new file mode 100644 index 0000000..29162f7 --- /dev/null +++ b/packages/deploy/src/modes/sandbox.test.ts @@ -0,0 +1,131 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; +import { resolveSandboxClient } from './sandbox.js'; +import type { ModeLaunchInput } from '../types.js'; + +function input(): Pick { + return { + workspace: 'ws-demo', + persona: { + id: 'demo', + intent: 'documentation', + tags: ['documentation'], + description: '', + skills: [], + tiers: { + best: { + harness: 'claude', + model: 'm', + systemPrompt: 's', + harnessSettings: { reasoning: 'medium', timeoutSeconds: 300 } + }, + 'best-value': { + harness: 'claude', + model: 'm', + systemPrompt: 's', + harnessSettings: { reasoning: 'medium', timeoutSeconds: 300 } + }, + minimum: { + harness: 'claude', + model: 'm', + systemPrompt: 's', + harnessSettings: { reasoning: 'medium', timeoutSeconds: 300 } + } + }, + cloud: true, + onEvent: './agent.ts' + } + }; +} + +function withEnv(overrides: Record, fn: () => T): T { + const previous: Record = {}; + for (const key of Object.keys(overrides)) { + previous[key] = process.env[key]; + if (overrides[key] === undefined) delete process.env[key]; + else process.env[key] = overrides[key]; + } + try { + return fn(); + } finally { + for (const [key, value] of Object.entries(previous)) { + if (value === undefined) delete process.env[key]; + else process.env[key] = value; + } + } +} + +test('resolveSandboxClient prefers BYO when DAYTONA_API_KEY is set', () => { + withEnv( + { + DAYTONA_API_KEY: 'sk_byo', + WORKFORCE_WORKSPACE_TOKEN: 'tok' + }, + () => { + const client = resolveSandboxClient(input()); + // BYO client carries the Daytona SDK; we infer mode by inspecting + // a mint-like call would tag the resulting handle. Easier to just + // verify by structural shape — but the simplest check is that the + // proxy path was *not* picked (which would have called fetch). + assert.ok(typeof client.mint === 'function'); + assert.ok(typeof client.exec === 'function'); + } + ); +}); + +test('resolveSandboxClient falls back to the cloud proxy when only WORKFORCE_WORKSPACE_TOKEN is set', () => { + withEnv( + { + DAYTONA_API_KEY: undefined, + DAYTONA_JWT_TOKEN: undefined, + WORKFORCE_WORKSPACE_TOKEN: 'tok-cloud', + WORKFORCE_CLOUD_URL: 'https://cloud.example.com' + }, + () => { + const client = resolveSandboxClient(input()); + assert.ok(typeof client.mint === 'function'); + } + ); +}); + +test('resolveSandboxClient throws when neither path is configured', () => { + withEnv( + { + DAYTONA_API_KEY: undefined, + DAYTONA_JWT_TOKEN: undefined, + WORKFORCE_WORKSPACE_TOKEN: undefined + }, + () => { + assert.throws(() => resolveSandboxClient(input()), /no Daytona credentials and no workforce workspace token/); + } + ); +}); + +test('resolveSandboxClient honors --byo-sandbox even when both paths are configured', () => { + withEnv( + { + DAYTONA_API_KEY: 'sk_byo', + WORKFORCE_WORKSPACE_TOKEN: 'tok' + }, + () => { + const client = resolveSandboxClient(input(), { forceByo: true }); + assert.ok(typeof client.mint === 'function'); + } + ); +}); + +test('resolveSandboxClient with forceByo and no BYO env throws a clear error', () => { + withEnv( + { + DAYTONA_API_KEY: undefined, + DAYTONA_JWT_TOKEN: undefined, + WORKFORCE_WORKSPACE_TOKEN: 'tok' + }, + () => { + assert.throws( + () => resolveSandboxClient(input(), { forceByo: true }), + /--byo-sandbox requested but no Daytona credentials/ + ); + } + ); +}); diff --git a/packages/deploy/src/modes/sandbox.ts b/packages/deploy/src/modes/sandbox.ts index f6aebab..56d7f47 100644 --- a/packages/deploy/src/modes/sandbox.ts +++ b/packages/deploy/src/modes/sandbox.ts @@ -1,74 +1,48 @@ -import { readFile } from 'node:fs/promises'; -import path from 'node:path'; -import { Daytona, type Sandbox } from '@daytonaio/sdk'; import type { ModeLaunchInput, ModeLaunchHandle, ModeLauncher } from '../types.js'; +import { + SANDBOX_BUNDLE_DIR, + createByoSandboxClient, + createProxySandboxClient, + type SandboxClient, + type SandboxHandle +} from './sandbox-client.js'; -/** - * Working directory the runner is invoked from inside the sandbox. Same - * mount root cloud's DaytonaRuntime defaults to, so persona authors get - * consistent paths whether they run under workforce or cloud workflows. - */ -const SANDBOX_BUNDLE_DIR = '/home/daytona/bundle'; +const DEFAULT_CLOUD_URL = 'https://cloud.agentworkforce.com'; /** - * Daytona authentication resolved before sandbox creation. Workforce - * supports two paths: + * Daytona-backed sandbox launcher with two auth paths: * - * - BYO — `DAYTONA_API_KEY` (+ optional `DAYTONA_ORGANIZATION_ID`) - * in the user's env. Zero workforce-cloud roundtrips. - * - Workforce-managed — a workspace token mints a Daytona JWT against - * the workforce cloud API (`POST /workspaces/:id/sandboxes`). - * Lights up once the cloud endpoint ships. - */ -export interface SandboxAuth { - apiKey?: string; - jwtToken?: string; - organizationId?: string; -} - -export function resolveSandboxAuth(): SandboxAuth | undefined { - const apiKey = process.env.DAYTONA_API_KEY; - const jwtToken = process.env.DAYTONA_JWT_TOKEN; - const organizationId = process.env.DAYTONA_ORGANIZATION_ID; - if (!apiKey && !jwtToken) return undefined; - return { - ...(apiKey ? { apiKey } : {}), - ...(jwtToken ? { jwtToken } : {}), - ...(organizationId ? { organizationId } : {}) - }; -} - -/** - * Daytona-backed sandbox launcher. Creates a TypeScript sandbox, uploads - * the bundle, and starts the runner with a long timeout (effectively - * unlimited for cron-driven agents). The sandbox stays alive after the - * exec call returns so subsequent envelopes the runner expects on stdin - * have a place to land — see `stop()` for the explicit teardown contract. + * - **BYO** — `DAYTONA_API_KEY` (or `DAYTONA_JWT_TOKEN` + + * `DAYTONA_ORGANIZATION_ID`) is present in env. The launcher talks + * directly to Daytona via `@daytonaio/sdk`. Zero workforce-cloud + * round-trips; useful in CI or for power users with their own + * Daytona accounts. + * - **Workforce-managed** — `DAYTONA_API_KEY` is absent but + * `WORKFORCE_WORKSPACE_TOKEN` is set (either via `workforce login` + * or exported manually). The launcher POSTs the cloud sandboxes + * endpoint to mint a proxy handle and routes all exec/upload + * traffic through cloud's per-sandbox `/exec` and `/files` URLs. + * Cloud holds the org Daytona credentials so users never see them. + * + * Mode picking is purely env-based today. Pass `--byo-sandbox` on the + * deploy CLI to force BYO when both are configured (handled in + * `resolveLauncher`, not here). * - * Streaming: Daytona's `executeCommand` is final-result-only. The runner - * exits when its envelope stream ends, and the resulting stdout/stderr - * blob is forwarded to the DeployIO at that point. Live tail support - * would require `process.createSession`, which is gated on a future - * iteration. + * Streaming: cloud's `/exec` endpoint and Daytona's `executeCommand` are + * both final-result-only. The runner exits when its envelope stream + * ends, and the resulting output is forwarded to DeployIO at that + * point. Live tail support is gated on a future iteration. */ export const sandboxLauncher: ModeLauncher = { async launch(input: ModeLaunchInput): Promise { - const auth = resolveSandboxAuth(); - if (!auth) { - throw new Error( - 'sandbox launcher: no Daytona credentials resolved. Either export DAYTONA_API_KEY (BYO) or run `workforce login` to mint a workforce-managed Daytona token.' - ); - } - - const daytona = new Daytona(auth); - const sandbox = await daytona.create({ - language: 'typescript', - name: `wf-${input.persona.id}`, - envVars: { + const client = resolveSandboxClient(input, input.byoSandbox ? { forceByo: true } : {}); + const handle = await client.mint({ + label: `wf-${input.persona.id}`, + env: { ...(input.env ?? {}), WORKFORCE_WORKSPACE_ID: input.workspace, WORKFORCE_PERSONA_ID: input.persona.id @@ -76,24 +50,23 @@ export const sandboxLauncher: ModeLauncher = { }); try { - await uploadBundle(sandbox, input); + await client.uploadBundle(handle, input.bundle); } catch (err) { - // If upload fails, the sandbox is unrecoverable for this deploy. - // Tear it down so we don't leak Daytona resources. - await sandbox.delete().catch(() => undefined); + // If upload fails the sandbox is unrecoverable for this deploy. + // Tear it down so we don't leak Daytona resources or charge for + // an idle workforce-managed sandbox. + await client.destroy(handle).catch(() => undefined); throw err; } const sandboxTimeoutSeconds = resolveTimeoutSeconds(input.persona.sandbox); let stopping = false; - let runner: Promise<{ code: number }> | undefined; - const stop = async (): Promise => { if (stopping) return; stopping = true; try { - await sandbox.delete(); + await client.destroy(handle); } catch (err) { input.io.warn( `sandbox: cleanup failed: ${err instanceof Error ? err.message : String(err)}` @@ -101,18 +74,15 @@ export const sandboxLauncher: ModeLauncher = { } }; - runner = (async () => { + const done = (async () => { try { - const result = await sandbox.process.executeCommand( - 'node runner.mjs', - SANDBOX_BUNDLE_DIR, - undefined, - sandboxTimeoutSeconds - ); - const output = (result.result ?? '').trim(); + const result = await client.exec(handle, 'node runner.mjs', { + cwd: SANDBOX_BUNDLE_DIR, + timeoutSeconds: sandboxTimeoutSeconds + }); + const output = result.output.trim(); if (output.length > 0) input.io.info(`[sandbox] ${output}`); - const exitCode = result.exitCode ?? 0; - return { code: exitCode }; + return { code: result.exitCode }; } catch (err) { if (!stopping) { input.io.error( @@ -124,46 +94,59 @@ export const sandboxLauncher: ModeLauncher = { })(); return { - id: `sandbox:${sandbox.id}`, + id: handle.id, stop, - done: runner + done }; } }; -async function uploadBundle(sandbox: Sandbox, input: ModeLaunchInput): Promise { - // Bundle artifacts are tiny (KB-range), so reading them into Buffers - // before upload is the simplest correct shape. If/when bundles grow to - // the MB range we revisit streaming. - const files = await Promise.all([ - fileUpload(input.bundle.runnerPath, `${SANDBOX_BUNDLE_DIR}/runner.mjs`), - fileUpload(input.bundle.bundlePath, `${SANDBOX_BUNDLE_DIR}/agent.bundle.mjs`), - fileUpload(input.bundle.personaCopyPath, `${SANDBOX_BUNDLE_DIR}/persona.json`), - fileUpload(input.bundle.packageJsonPath, `${SANDBOX_BUNDLE_DIR}/package.json`) - ]); - await sandbox.fs.uploadFiles(files); +/** + * Pick the sandbox client implementation based on env. Public so the + * deploy orchestrator (and tests) can plug in an explicit choice. + */ +export function resolveSandboxClient( + input: Pick & Partial>, + overrides: { + /** Force BYO even when both BYO and workforce-managed are configured. */ + forceByo?: boolean; + /** Inject a custom client (tests). */ + client?: SandboxClient; + } = {} +): SandboxClient { + if (overrides.client) return overrides.client; - // The bundle's package.json declares `@agentworkforce/runtime` as a - // dependency. We let npm resolve the version *from the staged bundle's - // package.json* rather than pinning `@latest` here — pinning `@latest` - // would silently drift away from the runtime version the bundle was - // tested against. - const install = await sandbox.process.executeCommand( - 'npm install --prefer-offline --no-audit --no-fund --loglevel=error', - SANDBOX_BUNDLE_DIR, - undefined, - 600 - ); - if ((install.exitCode ?? 0) !== 0) { + const apiKey = process.env.DAYTONA_API_KEY?.trim(); + const jwtToken = process.env.DAYTONA_JWT_TOKEN?.trim(); + const organizationId = process.env.DAYTONA_ORGANIZATION_ID?.trim(); + const byoAvailable = Boolean(apiKey || jwtToken); + + if (overrides.forceByo || byoAvailable) { + if (!byoAvailable) { + throw new Error( + 'sandbox launcher: --byo-sandbox requested but no Daytona credentials are in env. Set DAYTONA_API_KEY (or DAYTONA_JWT_TOKEN + DAYTONA_ORGANIZATION_ID).' + ); + } + return createByoSandboxClient({ + ...(apiKey ? { apiKey } : {}), + ...(jwtToken ? { jwtToken } : {}), + ...(organizationId ? { organizationId } : {}) + }); + } + + const workspaceToken = process.env.WORKFORCE_WORKSPACE_TOKEN?.trim(); + if (!workspaceToken) { throw new Error( - `sandbox: npm install failed (exit ${install.exitCode}): ${install.result?.slice(0, 400) ?? ''}` + 'sandbox launcher: no Daytona credentials and no workforce workspace token. Either export DAYTONA_API_KEY, or run `workforce login` (sets WORKFORCE_WORKSPACE_TOKEN) so we can mint a workforce-managed sandbox.' ); } -} - -async function fileUpload(localPath: string, remotePath: string): Promise<{ source: Buffer; destination: string }> { - const source = await readFile(localPath); - return { source, destination: remotePath }; + const cloudUrl = (process.env.WORKFORCE_CLOUD_URL?.trim() || DEFAULT_CLOUD_URL).replace(/\/$/, ''); + return createProxySandboxClient({ + cloudUrl, + workspaceId: input.workspace, + workspaceToken, + personaId: input.persona.id + }); } function resolveTimeoutSeconds(sandbox: ModeLaunchInput['persona']['sandbox']): number | undefined { @@ -173,3 +156,35 @@ function resolveTimeoutSeconds(sandbox: ModeLaunchInput['persona']['sandbox']): } return undefined; } + +// Re-exported for tests + power users wanting to compose the client manually. +export { + SANDBOX_BUNDLE_DIR, + createByoSandboxClient, + createProxySandboxClient, + type SandboxClient, + type SandboxHandle +} from './sandbox-client.js'; + +/** + * Legacy alias for the env-resolved Daytona credentials surface. Kept so + * existing imports of `resolveSandboxAuth` continue to compile; new code + * should use `resolveSandboxClient` instead. + */ +export interface SandboxAuth { + apiKey?: string; + jwtToken?: string; + organizationId?: string; +} + +export function resolveSandboxAuth(): SandboxAuth | undefined { + const apiKey = process.env.DAYTONA_API_KEY?.trim(); + const jwtToken = process.env.DAYTONA_JWT_TOKEN?.trim(); + const organizationId = process.env.DAYTONA_ORGANIZATION_ID?.trim(); + if (!apiKey && !jwtToken) return undefined; + return { + ...(apiKey ? { apiKey } : {}), + ...(jwtToken ? { jwtToken } : {}), + ...(organizationId ? { organizationId } : {}) + }; +} diff --git a/packages/deploy/src/types.ts b/packages/deploy/src/types.ts index 7277d3f..64643fa 100644 --- a/packages/deploy/src/types.ts +++ b/packages/deploy/src/types.ts @@ -101,6 +101,11 @@ export interface ModeLaunchInput { env?: Record; io: DeployIO; detach?: boolean; + /** + * Force BYO Daytona auth even when the user is logged in to workforce + * cloud. Mode-specific (sandbox launcher only); other modes ignore. + */ + byoSandbox?: boolean; } export interface ModeLaunchHandle {