From ccaf4fc9cd96863f24b2ac5933e0ceff99539e07 Mon Sep 17 00:00:00 2001 From: Brian Love Date: Sun, 5 Apr 2026 18:23:07 -0700 Subject: [PATCH 1/2] feat(cockpit): production deployment with serverless API proxy - Switch all 14 Angular apps to @angular/build:application - Deploy all 14 Python backends to LangGraph Cloud - Add Vercel serverless proxy that injects x-api-key server-side - Revert Angular environments to relative /api URLs (proxy handles routing) - Build Output API for proper multi-segment catch-all routing - Angular examples hosted at examples.stream-resource.dev - Deployment verification script and production smoke tests - CI jobs for examples deploy and production smoke --- .github/workflows/deploy-langgraph.yml | 45 ++-- .../angular/src/app/filesystem.component.ts | 95 +++++-- .../angular/src/environments/environment.ts | 6 +- .../filesystem/python/langgraph.json | 7 +- .../angular/src/app/memory.component.ts | 60 +++-- .../angular/src/environments/environment.ts | 6 +- .../deep-agents/memory/python/langgraph.json | 7 +- .../angular/src/app/planning.component.ts | 67 +++-- .../angular/src/environments/environment.ts | 6 +- .../planning/python/langgraph.json | 7 +- .../angular/src/app/sandboxes.component.ts | 109 ++++---- .../angular/src/environments/environment.ts | 6 +- .../sandboxes/python/langgraph.json | 7 +- .../angular/src/app/skills.component.ts | 99 ++++--- .../angular/src/environments/environment.ts | 6 +- .../deep-agents/skills/python/langgraph.json | 7 +- .../angular/src/app/subagents.component.ts | 79 +++--- .../angular/src/environments/environment.ts | 6 +- .../subagents/python/langgraph.json | 7 +- .../angular/src/environments/environment.ts | 6 +- .../deployment-runtime/python/langgraph.json | 7 +- .../src/app/durable-execution.component.ts | 142 +++++----- .../angular/src/environments/environment.ts | 6 +- .../durable-execution/python/langgraph.json | 7 +- .../angular/src/environments/environment.ts | 6 +- 
.../interrupts/python/langgraph.json | 7 +- .../angular/src/environments/environment.ts | 6 +- .../langgraph/memory/python/langgraph.json | 7 +- .../angular/src/app/persistence.component.ts | 101 ++++--- .../angular/src/environments/environment.ts | 6 +- .../persistence/python/langgraph.json | 7 +- .../angular/src/environments/environment.ts | 6 +- .../langgraph/streaming/python/langgraph.json | 7 +- .../angular/src/environments/environment.ts | 6 +- .../langgraph/subgraphs/python/langgraph.json | 7 +- .../angular/src/app/time-travel.component.ts | 102 ++++--- .../angular/src/environments/environment.ts | 6 +- .../time-travel/python/langgraph.json | 7 +- ...05-cockpit-examples-tier1-customization.md | 249 ------------------ scripts/assemble-examples.ts | 48 +++- scripts/examples-middleware.ts | 142 ++++++++++ scripts/verify-langgraph-deployments.ts | 11 +- vercel.examples.json | 1 + 43 files changed, 856 insertions(+), 676 deletions(-) delete mode 100644 docs/superpowers/specs/2026-04-05-cockpit-examples-tier1-customization.md create mode 100644 scripts/examples-middleware.ts diff --git a/.github/workflows/deploy-langgraph.yml b/.github/workflows/deploy-langgraph.yml index d1dfd4a98..0a79d83c9 100644 --- a/.github/workflows/deploy-langgraph.yml +++ b/.github/workflows/deploy-langgraph.yml @@ -19,33 +19,33 @@ jobs: strategy: matrix: include: - - name: langgraph-streaming + - name: streaming path: cockpit/langgraph/streaming/python - - name: langgraph-persistence + - name: persistence path: cockpit/langgraph/persistence/python - - name: langgraph-interrupts + - name: interrupts path: cockpit/langgraph/interrupts/python - - name: langgraph-memory + - name: memory path: cockpit/langgraph/memory/python - - name: langgraph-durable-execution + - name: durable-execution path: cockpit/langgraph/durable-execution/python - - name: langgraph-subgraphs + - name: subgraphs path: cockpit/langgraph/subgraphs/python - - name: langgraph-time-travel + - name: time-travel path: 
cockpit/langgraph/time-travel/python - - name: langgraph-deployment-runtime + - name: deployment-runtime path: cockpit/langgraph/deployment-runtime/python - - name: deep-agents-planning + - name: planning path: cockpit/deep-agents/planning/python - - name: deep-agents-filesystem + - name: filesystem path: cockpit/deep-agents/filesystem/python - - name: deep-agents-subagents + - name: da-subagents path: cockpit/deep-agents/subagents/python - - name: deep-agents-memory + - name: da-memory path: cockpit/deep-agents/memory/python - - name: deep-agents-skills + - name: skills path: cockpit/deep-agents/skills/python - - name: deep-agents-sandboxes + - name: sandboxes path: cockpit/deep-agents/sandboxes/python steps: - uses: actions/checkout@v6.0.2 @@ -54,14 +54,25 @@ jobs: with: python-version: '3.12' - - name: Install langgraph-cli - run: pip install langgraph-cli + - name: Install uv + run: pip install uv + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Write .env for deployment + if: | + github.event_name == 'workflow_dispatch' && (inputs.capability == '' || contains(matrix.path, inputs.capability)) + || github.event_name == 'push' + working-directory: ${{ matrix.path }} + run: | + echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" > .env - name: Deploy ${{ matrix.name }} if: | github.event_name == 'workflow_dispatch' && (inputs.capability == '' || contains(matrix.path, inputs.capability)) || github.event_name == 'push' working-directory: ${{ matrix.path }} - run: langgraph deploy + run: uv run --with langgraph-cli langgraph deploy --name ${{ matrix.name }} --no-wait env: LANGSMITH_API_KEY: ${{ secrets.LANGSMITH_API_KEY }} diff --git a/cockpit/deep-agents/filesystem/angular/src/app/filesystem.component.ts b/cockpit/deep-agents/filesystem/angular/src/app/filesystem.component.ts index 1a3748729..16bffd58d 100644 --- a/cockpit/deep-agents/filesystem/angular/src/app/filesystem.component.ts +++ 
b/cockpit/deep-agents/filesystem/angular/src/app/filesystem.component.ts @@ -1,33 +1,58 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; -import { AIMessage } from '@langchain/core/messages'; import { environment } from '../environments/environment'; +interface ToolCallEntry { + name: string; + args: string; + result?: string; +} + +/** + * FilesystemComponent demonstrates agent file operations. + * + * The agent can read and write files using tool calls. The sidebar + * shows a real-time log of each file operation as it happens. + * + * Key integration points: + * - `stream.messages()` contains all messages including tool call results + * - `computed()` derives tool call entries from AI messages + * - Tool calls update reactively as the agent performs file operations + */ @Component({ selector: 'app-filesystem', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Ask the agent to read or write a file.

+ } + + `, }) export class FilesystemComponent { @@ -36,17 +61,29 @@ export class FilesystemComponent { assistantId: environment.streamingAssistantId, }); - protected readonly fileOps = computed(() => { - const messages = this.stream.messages(); - const ops: { name: string; path: string }[] = []; - for (const msg of messages) { - if (!(msg instanceof AIMessage)) continue; - for (const tc of this.stream.getToolCalls(msg)) { - if (tc.call.name === 'read_file' || tc.call.name === 'write_file') { - ops.push({ name: tc.call.name, path: (tc.call.args as Record)?.['path'] ?? '' }); + toolCallEntries = computed(() => { + const msg = this.stream.messages(); + const calls: ToolCallEntry[] = []; + for (const m of msg) { + if ((m as any).tool_calls) { + for (const tc of (m as any).tool_calls) { + calls.push({ name: tc.name, args: JSON.stringify(tc.args), result: tc.output }); } } } - return ops; + return calls; }); + + getFilePath(args: string): string { + try { + const parsed = JSON.parse(args); + return parsed.path ?? args; + } catch { + return args; + } + } + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/filesystem/angular/src/environments/environment.ts b/cockpit/deep-agents/filesystem/angular/src/environments/environment.ts index eec661987..3569a83ba 100644 --- a/cockpit/deep-agents/filesystem/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/filesystem/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. 
*/ export const environment = { production: true, - langGraphApiUrl: 'https://filesystem-2330285f57625bff8654bc026f70a6ae.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'filesystem', }; diff --git a/cockpit/deep-agents/filesystem/python/langgraph.json b/cockpit/deep-agents/filesystem/python/langgraph.json index 1844f2c4a..140647f99 100644 --- a/cockpit/deep-agents/filesystem/python/langgraph.json +++ b/cockpit/deep-agents/filesystem/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "filesystem": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/deep-agents/memory/angular/src/app/memory.component.ts b/cockpit/deep-agents/memory/angular/src/app/memory.component.ts index 279f044d8..c6c60fdf9 100644 --- a/cockpit/deep-agents/memory/angular/src/app/memory.component.ts +++ b/cockpit/deep-agents/memory/angular/src/app/memory.component.ts @@ -1,31 +1,43 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; import { environment } from '../environments/environment'; +/** + * MemoryComponent demonstrates persistent agent memory across sessions. + * + * The agent extracts facts about the user from each conversation turn + * and stores them in `agent_memory` state. The sidebar shows all learned + * facts in real time as the agent updates its memory. 
+ * + * Key integration points: + * - `stream.value()` contains the agent state including `agent_memory` + * - `computed()` derives key/value pairs for the sidebar + * - Memory entries update reactively as the agent learns new facts + */ @Component({ selector: 'app-da-memory', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Tell the agent something about yourself to see it remember.

+ } + + `, }) export class MemoryComponent { @@ -34,10 +46,12 @@ export class MemoryComponent { assistantId: environment.streamingAssistantId, }); - protected readonly memoryEntries = computed(() => { - const val = this.stream.value() as Record; - const mem = val?.['agent_memory']; - if (!mem || typeof mem !== 'object') return []; - return Object.entries(mem as Record); + memoryEntries = computed(() => { + const val = this.stream.value() as { agent_memory?: Record } | undefined; + return Object.entries(val?.agent_memory ?? {}); }); + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/memory/angular/src/environments/environment.ts b/cockpit/deep-agents/memory/angular/src/environments/environment.ts index 8b9dea6b0..ea34f08ff 100644 --- a/cockpit/deep-agents/memory/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/memory/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://da-memory-15f767adfa6f5cd48bd45a0fa4db29b5.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'da-memory', }; diff --git a/cockpit/deep-agents/memory/python/langgraph.json b/cockpit/deep-agents/memory/python/langgraph.json index 0b71d8f3c..aab93ac3a 100644 --- a/cockpit/deep-agents/memory/python/langgraph.json +++ b/cockpit/deep-agents/memory/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "da-memory": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/deep-agents/planning/angular/src/app/planning.component.ts b/cockpit/deep-agents/planning/angular/src/app/planning.component.ts index deb555a67..6394149e9 100644 --- a/cockpit/deep-agents/planning/angular/src/app/planning.component.ts +++ b/cockpit/deep-agents/planning/angular/src/app/planning.component.ts @@ -1,37 +1,51 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; import { environment } from '../environments/environment'; +interface PlanStep { + title: string; + status: 'pending' | 'running' | 'complete'; +} + +/** + * PlanningComponent demonstrates agent task decomposition. + * + * The agent receives a complex task, breaks it into ordered steps, + * and executes them. The sidebar shows each step's status in real time. + * + * Key integration points: + * - `stream.value()` contains the plan state with step list + * - `computed()` derives the plan steps for the sidebar + * - Steps update reactively as the agent works through them + */ @Component({ selector: 'app-planning', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Ask a complex question to see the plan.

+ } + + `, }) export class PlanningComponent { @@ -40,9 +54,12 @@ export class PlanningComponent { assistantId: environment.streamingAssistantId, }); - protected readonly planSteps = computed(() => { - const val = this.stream.value() as Record; - const plan = val?.['plan']; - return Array.isArray(plan) ? plan as { title: string; status: string }[] : []; + planSteps = computed(() => { + const val = this.stream.value() as { plan?: PlanStep[] } | undefined; + return val?.plan ?? []; }); + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/planning/angular/src/environments/environment.ts b/cockpit/deep-agents/planning/angular/src/environments/environment.ts index 28fa26ba9..d0950d618 100644 --- a/cockpit/deep-agents/planning/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/planning/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://planning-7ca04c65ce7650048ec0d16fb96a7638.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'planning', }; diff --git a/cockpit/deep-agents/planning/python/langgraph.json b/cockpit/deep-agents/planning/python/langgraph.json index d7e8c1663..72a31545d 100644 --- a/cockpit/deep-agents/planning/python/langgraph.json +++ b/cockpit/deep-agents/planning/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "planning": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/deep-agents/sandboxes/angular/src/app/sandboxes.component.ts b/cockpit/deep-agents/sandboxes/angular/src/app/sandboxes.component.ts index 458a7cd56..ab8e8ab75 100644 --- a/cockpit/deep-agents/sandboxes/angular/src/app/sandboxes.component.ts +++ b/cockpit/deep-agents/sandboxes/angular/src/app/sandboxes.component.ts @@ -1,46 +1,59 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; -import { AIMessage } from '@langchain/core/messages'; import { environment } from '../environments/environment'; +interface ExecutionLog { + code: string; + stdout: string; + exitStatus: number; +} + +/** + * SandboxesComponent demonstrates a coding agent that executes Python code. + * + * The agent writes and runs code snippets to solve problems using a + * `run_code` tool. The sidebar shows execution logs โ€” code input, stdout + * output, and exit status โ€” for each sandbox execution. + * + * Key integration points: + * - `stream.messages()` contains all messages including tool call results + * - `computed()` derives execution log entries from tool calls in AI messages + * - Logs update reactively as the agent writes and runs code + */ @Component({ selector: 'app-sandboxes', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Ask the agent to write and run Python code.

+ } + + `, }) export class SandboxesComponent { @@ -49,27 +62,35 @@ export class SandboxesComponent { assistantId: environment.streamingAssistantId, }); - protected readonly execLogs = computed(() => { - const messages = this.stream.messages(); - const logs: { code: string; stdout: string; exitStatus: number }[] = []; - for (const msg of messages) { - if (!(msg instanceof AIMessage)) continue; - for (const tc of this.stream.getToolCalls(msg)) { - if (tc.call.name === 'run_code') { - const resultIdx = messages.indexOf(msg) + 1; - const resultMsg = messages[resultIdx]; - let stdout = '', exitStatus = 0; - if (resultMsg && typeof resultMsg.content === 'string') { + executionLogs = computed(() => { + const msgs = this.stream.messages(); + const logs: ExecutionLog[] = []; + for (const m of msgs) { + if ((m as any).tool_calls) { + for (const tc of (m as any).tool_calls) { + if (tc.name === 'run_code' && tc.output) { try { - const parsed = JSON.parse(resultMsg.content); - stdout = parsed.stdout ?? ''; - exitStatus = parsed.exit_status ?? 0; - } catch { /* ignore */ } + const parsed = JSON.parse(tc.output); + logs.push({ + code: tc.args?.code ?? '', + stdout: parsed.stdout ?? '', + exitStatus: parsed.exit_status ?? 0, + }); + } catch { + logs.push({ + code: tc.args?.code ?? '', + stdout: tc.output, + exitStatus: 0, + }); + } } - logs.push({ code: (tc.call.args as Record)?.['code'] ?? '', stdout, exitStatus }); } } } return logs; }); + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/sandboxes/angular/src/environments/environment.ts b/cockpit/deep-agents/sandboxes/angular/src/environments/environment.ts index 8c4a57c37..336dc7f42 100644 --- a/cockpit/deep-agents/sandboxes/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/sandboxes/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. 
* - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://sandboxes-8c70b6ac20265827aa92397299fcb9f7.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'sandboxes', }; diff --git a/cockpit/deep-agents/sandboxes/python/langgraph.json b/cockpit/deep-agents/sandboxes/python/langgraph.json index 9837ec2d8..9ac35a96c 100644 --- a/cockpit/deep-agents/sandboxes/python/langgraph.json +++ b/cockpit/deep-agents/sandboxes/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "sandboxes": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/deep-agents/skills/angular/src/app/skills.component.ts b/cockpit/deep-agents/skills/angular/src/app/skills.component.ts index 59e2e0f2f..718ce244f 100644 --- a/cockpit/deep-agents/skills/angular/src/app/skills.component.ts +++ b/cockpit/deep-agents/skills/angular/src/app/skills.component.ts @@ -1,39 +1,66 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; -import { AIMessage } from '@langchain/core/messages'; import { environment } from '../environments/environment'; -const SKILL_ICONS: Record = { - calculator: '๐Ÿงฎ', - word_count: '๐Ÿ”ข', - summarize: '๐Ÿ“', -}; +interface SkillInvocation { + skillName: string; + args: string; + result?: string; +} +/** + * SkillsComponent demonstrates a multi-skill agent with specialized tools. 
+ * + * The agent can calculate math expressions, count words, and summarize text + * by selecting the appropriate skill tool for each user request. The sidebar + * shows each skill invocation as a card with the skill name, input args, + * and result. + * + * Key integration points: + * - `stream.messages()` contains all messages including tool call data + * - `computed()` derives skill invocation cards from tool calls in AI messages + * - Invocations update reactively as the agent calls and receives tool results + */ @Component({ selector: 'app-skills', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Ask the agent to calculate, count words, or summarize text.

+ } + + `, }) export class SkillsComponent { @@ -42,18 +69,24 @@ export class SkillsComponent { assistantId: environment.streamingAssistantId, }); - protected readonly skillInvocations = computed(() => { - const messages = this.stream.messages(); - const invocations: { name: string; icon: string }[] = []; - for (const msg of messages) { - if (!(msg instanceof AIMessage)) continue; - for (const tc of this.stream.getToolCalls(msg)) { - const name = tc.call.name; - if (name === 'calculator' || name === 'word_count' || name === 'summarize') { - invocations.push({ name, icon: SKILL_ICONS[name] ?? '๐Ÿ”ง' }); + skillInvocations = computed(() => { + const msgs = this.stream.messages(); + const invocations: SkillInvocation[] = []; + for (const m of msgs) { + if ((m as any).tool_calls) { + for (const tc of (m as any).tool_calls) { + invocations.push({ + skillName: tc.name, + args: JSON.stringify(tc.args), + result: tc.output, + }); } } } return invocations; }); + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/skills/angular/src/environments/environment.ts b/cockpit/deep-agents/skills/angular/src/environments/environment.ts index 6c2d35f26..324c28bc5 100644 --- a/cockpit/deep-agents/skills/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/skills/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. 
*/ export const environment = { production: true, - langGraphApiUrl: 'https://skills-802ff50f64325f1ea973cff1c97a49f9.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'skills', }; diff --git a/cockpit/deep-agents/skills/python/langgraph.json b/cockpit/deep-agents/skills/python/langgraph.json index eb40523e8..fad10e4f2 100644 --- a/cockpit/deep-agents/skills/python/langgraph.json +++ b/cockpit/deep-agents/skills/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "skills": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/deep-agents/subagents/angular/src/app/subagents.component.ts b/cockpit/deep-agents/subagents/angular/src/app/subagents.component.ts index 648aa531a..1868c0a6a 100644 --- a/cockpit/deep-agents/subagents/angular/src/app/subagents.component.ts +++ b/cockpit/deep-agents/subagents/angular/src/app/subagents.component.ts @@ -1,33 +1,56 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { Component, computed } from '@angular/core'; -import { ChatDebugComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; -import { AIMessage } from '@langchain/core/messages'; import { environment } from '../environments/environment'; +/** + * SubagentsComponent demonstrates the Deep Agents subagent delegation pattern. + * + * The orchestrator agent receives a task and delegates subtasks to specialist + * subagents via tool calls. Each tool call spawns a child agent that streams + * its own progress independently. 
+ * + * Key integration points: + * - `stream.subagents()` returns a Map + * - `subagentEntries` derives a sorted array for sidebar rendering + * - Each entry shows the tool call ID (truncated), status badge, and message count + * - Subagent statuses update reactively: pending โ†’ running โ†’ complete + */ @Component({ selector: 'app-subagents', standalone: true, - imports: [ChatDebugComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ @empty { +

Ask a question to see subagent activity.

+ } + + `, }) export class SubagentsComponent { @@ -36,15 +59,9 @@ export class SubagentsComponent { assistantId: environment.streamingAssistantId, }); - protected readonly delegations = computed(() => { - const messages = this.stream.messages(); - const entries: { name: string }[] = []; - for (const msg of messages) { - if (!(msg instanceof AIMessage)) continue; - for (const tc of this.stream.getToolCalls(msg)) { - entries.push({ name: tc.call.name }); - } - } - return entries; - }); + subagentEntries = computed(() => Array.from(this.stream.subagents().entries())); + + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } } diff --git a/cockpit/deep-agents/subagents/angular/src/environments/environment.ts b/cockpit/deep-agents/subagents/angular/src/environments/environment.ts index 6764b6c4a..b75c17e23 100644 --- a/cockpit/deep-agents/subagents/angular/src/environments/environment.ts +++ b/cockpit/deep-agents/subagents/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://da-subagents-31e4639441165df7848aaad426e61728.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'subagents', }; diff --git a/cockpit/deep-agents/subagents/python/langgraph.json b/cockpit/deep-agents/subagents/python/langgraph.json index 8fae177b7..aa1ffd6b8 100644 --- a/cockpit/deep-agents/subagents/python/langgraph.json +++ b/cockpit/deep-agents/subagents/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "subagents": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/deployment-runtime/angular/src/environments/environment.ts b/cockpit/langgraph/deployment-runtime/angular/src/environments/environment.ts index 4b03ed66b..f7abe7c21 100644 --- a/cockpit/langgraph/deployment-runtime/angular/src/environments/environment.ts +++ b/cockpit/langgraph/deployment-runtime/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://deployment-runtime-ce6aad33cc10505faca2b6137e76ba35.us.langgraph.app', + langGraphApiUrl: '/api', deploymentRuntimeAssistantId: 'deployment-runtime', }; diff --git a/cockpit/langgraph/deployment-runtime/python/langgraph.json b/cockpit/langgraph/deployment-runtime/python/langgraph.json index 8deb24c0c..78e22f388 100644 --- a/cockpit/langgraph/deployment-runtime/python/langgraph.json +++ b/cockpit/langgraph/deployment-runtime/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "deployment-runtime": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/durable-execution/angular/src/app/durable-execution.component.ts b/cockpit/langgraph/durable-execution/angular/src/app/durable-execution.component.ts index 047325e90..2aa442d12 100644 --- a/cockpit/langgraph/durable-execution/angular/src/app/durable-execution.component.ts +++ b/cockpit/langgraph/durable-execution/angular/src/app/durable-execution.component.ts @@ -1,100 +1,96 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { Component } from '@angular/core'; +import { LegacyChatComponent } from '@cacheplane/chat'; +import { streamResource } from '@cacheplane/stream-resource'; +import { environment } from '../environments/environment'; + /** - * DurableExecutionComponent demonstrates LangGraph's durable execution model. + * DurableExecutionComponent demonstrates fault-tolerant multi-step execution + * with `streamResource()`. * - * Unlike a stateless API call, a LangGraph graph persists its execution state - * after every node. If the server restarts mid-run, the graph resumes from the - * last completed node โ€” this is "durable execution". + * This example shows how a graph checkpoints at each node, enabling it to + * resume after failures. The sidebar shows execution status in real time: + * - `stream.status()` as a badge (idle/loading/resolved/error) + * - `stream.hasValue()` indicator for received data + * - A "Retry" button that calls `stream.reload()` when `stream.error()` is set * - * This example visualises the pipeline steps (`analyze โ†’ plan โ†’ generate`) and - * tracks which step the agent is currently executing via the `step` state key. - * A retry button is shown when the stream enters an error state, demonstrating - * how `stream.reload()` re-submits the last input to resume a failed run. + * The backend processes each request through three nodes: + * analyze โ†’ plan โ†’ generate + * Each node updates `state.step` so the UI can track progress. 
*/ -import { Component, computed } from '@angular/core'; -import { ChatComponent } from '@cacheplane/chat'; -import { streamResource } from '@cacheplane/stream-resource'; -import { environment } from '../environments/environment'; - @Component({ selector: 'app-durable-execution', standalone: true, - imports: [ChatComponent], + imports: [LegacyChatComponent], template: ` -
- -
- -
- @for (step of steps; track step) { -
- - - - {{ step }} - -
- @if (!$last) { - โ†’ - } - } + + +

Execution Status

+ +
+ Status +
+ + {{ stream.status() }} + +
- -
- - {{ stream.status() }} - - @if (stream.error()) { - - } +
+ Data Received +
+ + {{ stream.hasValue() ? 'Yes' : 'No' }} +
-
- - -
+ @if (stream.error()) { +
+
Execution Failed
+ +
+ } + + `, }) export class DurableExecutionComponent { - protected readonly steps = ['analyze', 'plan', 'generate']; - + /** + * The streaming resource backing this durable-execution demo. + * + * The graph runs three nodes (analyze โ†’ plan โ†’ generate), checkpointing + * after each one. If the graph fails partway through, `stream.reload()` + * re-submits the last input so the run can resume from the last checkpoint. + */ protected readonly stream = streamResource({ apiUrl: environment.langGraphApiUrl, assistantId: environment.streamingAssistantId, }); - protected readonly currentStep = computed(() => { - const val = this.stream.value() as Record; - return (val?.['step'] as string) ?? ''; - }); - - protected isStepComplete(step: string): boolean { - const idx = this.steps.indexOf(step); - const currentIdx = this.steps.indexOf(this.currentStep()); - return currentIdx > idx; + /** + * Submit a message to be processed through the multi-node graph. + */ + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); } - protected statusColor(): string { + /** + * Returns a colour for the status badge based on the current stream status. + */ + statusBadgeColor(): string { switch (this.stream.status()) { case 'loading': - case 'reloading': - return '#2563eb'; - case 'resolved': - return '#16a34a'; - case 'error': - return '#dc2626'; - default: - return '#6b7280'; + case 'reloading': return '#2563eb'; + case 'resolved': return '#16a34a'; + case 'error': return '#dc2626'; + default: return '#6b7280'; } } } diff --git a/cockpit/langgraph/durable-execution/angular/src/environments/environment.ts b/cockpit/langgraph/durable-execution/angular/src/environments/environment.ts index 218ea72c1..8d9bc9d02 100644 --- a/cockpit/langgraph/durable-execution/angular/src/environments/environment.ts +++ b/cockpit/langgraph/durable-execution/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. 
* - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL — Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://durable-execution-123221d8b543545399d252dc6bd7de1b.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'durable-execution', }; diff --git a/cockpit/langgraph/durable-execution/python/langgraph.json b/cockpit/langgraph/durable-execution/python/langgraph.json index 28e64c336..51a9a41c8 100644 --- a/cockpit/langgraph/durable-execution/python/langgraph.json +++ b/cockpit/langgraph/durable-execution/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "durable-execution": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/interrupts/angular/src/environments/environment.ts b/cockpit/langgraph/interrupts/angular/src/environments/environment.ts index 5ac9d8cdc..4c682ee31 100644 --- a/cockpit/langgraph/interrupts/angular/src/environments/environment.ts +++ b/cockpit/langgraph/interrupts/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL — Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side.
*/ export const environment = { production: true, - langGraphApiUrl: 'https://interrupts-8e1524d6d8fb558381eed4618129bc50.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'interrupts', }; diff --git a/cockpit/langgraph/interrupts/python/langgraph.json b/cockpit/langgraph/interrupts/python/langgraph.json index e989f3a9b..c391b201f 100644 --- a/cockpit/langgraph/interrupts/python/langgraph.json +++ b/cockpit/langgraph/interrupts/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "interrupts": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/memory/angular/src/environments/environment.ts b/cockpit/langgraph/memory/angular/src/environments/environment.ts index b3186b0f2..94dfecc5c 100644 --- a/cockpit/langgraph/memory/angular/src/environments/environment.ts +++ b/cockpit/langgraph/memory/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://memory-1b3234dbe2e55ba59010b3469be45a0a.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'memory', }; diff --git a/cockpit/langgraph/memory/python/langgraph.json b/cockpit/langgraph/memory/python/langgraph.json index 260737556..47ad1850c 100644 --- a/cockpit/langgraph/memory/python/langgraph.json +++ b/cockpit/langgraph/memory/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "memory": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/persistence/angular/src/app/persistence.component.ts b/cockpit/langgraph/persistence/angular/src/app/persistence.component.ts index e8f31bd01..aa7f77f42 100644 --- a/cockpit/langgraph/persistence/angular/src/app/persistence.component.ts +++ b/cockpit/langgraph/persistence/angular/src/app/persistence.component.ts @@ -1,57 +1,88 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { Component } from '@angular/core'; +import { LegacyChatComponent } from '@cacheplane/chat'; +import { streamResource } from '@cacheplane/stream-resource'; +import { environment } from '../environments/environment'; + /** - * PersistenceComponent demonstrates LangGraph's thread-based persistence. + * PersistenceComponent demonstrates thread persistence with `streamResource()`. * - * Each conversation is stored as a "thread" on the LangGraph backend. Threads - * survive page refreshes and can be resumed at any time by switching back to - * them. This example tracks created threads in a local signal and lets the - * user switch between them via the chat sidebar. + * This example shows how conversations persist across browser refreshes. + * Each thread has a unique ID that can be stored and resumed later. + * Use `stream.switchThread(id)` to load a previous conversation, + * or `stream.switchThread(null)` to start fresh. 
* * Key integration points: - * - `threadId: null` โ€” lets streamResource auto-create a new thread on first submit - * - `onThreadId` โ€” called once the backend assigns a thread ID; used here to - * add the thread to the local list and set it as active - * - `stream.switchThread(id)` โ€” reconnects the resource to an existing thread + * - `onThreadId` callback captures new thread IDs for storage + * - `stream.switchThread(id)` resumes a previous conversation + * - `stream.messages()` loads the full history when switching threads */ -import { Component, signal } from '@angular/core'; -import { ChatComponent, type Thread } from '@cacheplane/chat'; -import { streamResource } from '@cacheplane/stream-resource'; -import { environment } from '../environments/environment'; - @Component({ selector: 'app-persistence', standalone: true, - imports: [ChatComponent], + imports: [LegacyChatComponent], template: ` - + + +

Threads

+ @for (id of threadIds; track id) { + + } + +
+
`, }) export class PersistenceComponent { - protected readonly threads = signal([]); - protected readonly activeThreadId = signal(''); - + /** + * The streaming resource with thread persistence. + * + * The `onThreadId` callback fires when a new thread is created, + * allowing us to track thread IDs for the sidebar picker. + */ protected readonly stream = streamResource({ apiUrl: environment.langGraphApiUrl, assistantId: environment.streamingAssistantId, - threadId: null, - onThreadId: (id: string) => this.trackThread(id), + onThreadId: (id: string) => { + this.currentThreadId = id; + if (!this.threadIds.includes(id)) this.threadIds.push(id); + }, }); - private trackThread(id: string): void { - this.activeThreadId.set(id); - if (!this.threads().find(t => t.id === id)) { - this.threads.update(list => [...list, { id }]); - } + threadIds: string[] = []; + currentThreadId = ''; + + /** + * Submit a message to the current thread. + */ + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); } - protected onThreadSelected(id: string): void { - this.activeThreadId.set(id); + /** + * Switch to an existing thread, loading its full message history. + */ + selectThread(id: string): void { + this.currentThreadId = id; this.stream.switchThread(id); } + + /** + * Start a new conversation thread. + */ + newThread(): void { + this.currentThreadId = ''; + this.stream.switchThread(null); + } } diff --git a/cockpit/langgraph/persistence/angular/src/environments/environment.ts b/cockpit/langgraph/persistence/angular/src/environments/environment.ts index ddc3351b6..a7fd6b95f 100644 --- a/cockpit/langgraph/persistence/angular/src/environments/environment.ts +++ b/cockpit/langgraph/persistence/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. 
+ * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://persistence-b4038c008b5e537787dda6a6774c8f91.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'persistence', }; diff --git a/cockpit/langgraph/persistence/python/langgraph.json b/cockpit/langgraph/persistence/python/langgraph.json index 8cda741a2..ceb92fbd7 100644 --- a/cockpit/langgraph/persistence/python/langgraph.json +++ b/cockpit/langgraph/persistence/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "persistence": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/streaming/angular/src/environments/environment.ts b/cockpit/langgraph/streaming/angular/src/environments/environment.ts index f2d9ce265..85fd3ffa6 100644 --- a/cockpit/langgraph/streaming/angular/src/environments/environment.ts +++ b/cockpit/langgraph/streaming/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. 
*/ export const environment = { production: true, - langGraphApiUrl: 'https://streaming-b01895ee8c8d5211967fba7a64c55db8.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'streaming', }; diff --git a/cockpit/langgraph/streaming/python/langgraph.json b/cockpit/langgraph/streaming/python/langgraph.json index 37582903e..29997e521 100644 --- a/cockpit/langgraph/streaming/python/langgraph.json +++ b/cockpit/langgraph/streaming/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "streaming": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/subgraphs/angular/src/environments/environment.ts b/cockpit/langgraph/subgraphs/angular/src/environments/environment.ts index 684715bd8..a3c4d0636 100644 --- a/cockpit/langgraph/subgraphs/angular/src/environments/environment.ts +++ b/cockpit/langgraph/subgraphs/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://subgraphs-c923bcb068c458b09d789f147875f426.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'subgraphs', }; diff --git a/cockpit/langgraph/subgraphs/python/langgraph.json b/cockpit/langgraph/subgraphs/python/langgraph.json index 40d11fe9f..e2126b1ef 100644 --- a/cockpit/langgraph/subgraphs/python/langgraph.json +++ b/cockpit/langgraph/subgraphs/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "subgraphs": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." 
+ ], + "python_version": "3.12", + "env": ".env" } diff --git a/cockpit/langgraph/time-travel/angular/src/app/time-travel.component.ts b/cockpit/langgraph/time-travel/angular/src/app/time-travel.component.ts index 331417862..17759117d 100644 --- a/cockpit/langgraph/time-travel/angular/src/app/time-travel.component.ts +++ b/cockpit/langgraph/time-travel/angular/src/app/time-travel.component.ts @@ -1,66 +1,84 @@ -// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 -/** - * TimeTravelComponent demonstrates LangGraph's checkpoint and time-travel API. - * - * Every time the agent sends a response, LangGraph saves a checkpoint โ€” a - * snapshot of the full conversation state at that moment. Time travel lets - * you jump back to any checkpoint and continue from there. - * - * Two modes are exposed: - * - **Replay**: re-runs the graph from the selected checkpoint with the same - * input, producing the same (or a different, if non-deterministic) output. - * - **Fork**: sets the active branch to the checkpoint so the *next* submit() - * starts a new conversation branch diverging from that point. - * - * Both modes call `stream.setBranch(checkpointId)` under the hood; the - * difference is only conceptual and reflected in how the user interacts next. - */ import { Component } from '@angular/core'; -import { ChatComponent, ChatTimelineSliderComponent } from '@cacheplane/chat'; +import { LegacyChatComponent } from '@cacheplane/chat'; import { streamResource } from '@cacheplane/stream-resource'; import { environment } from '../environments/environment'; +/** + * TimeTravelComponent demonstrates replaying and branching conversation history. 
+ * + * Key integration points: + * - `stream.history()` — array of ThreadState snapshots + * - `stream.branch()` — current branch identifier + * - `stream.setBranch(id)` — switch to a different checkpoint + */ @Component({ selector: 'app-time-travel', standalone: true, - imports: [ChatComponent, ChatTimelineSliderComponent], + imports: [LegacyChatComponent], template: ` -
- - -
+ + +

History

+ @for (state of stream.history(); track $index) { + + } + @if (stream.history().length === 0) { +

No history yet. Send a message to begin.

+ } +
+
`, }) export class TimeTravelComponent { + /** + * The streaming resource with checkpointing enabled. + * + * `stream.history()` provides an array of ThreadState snapshots for + * the current thread. `stream.branch()` tracks the active checkpoint. + * Call `stream.setBranch(checkpointId)` to replay from a past state. + */ protected readonly stream = streamResource({ apiUrl: environment.langGraphApiUrl, assistantId: environment.streamingAssistantId, }); /** - * Replay: sets the branch to replay from this checkpoint. - * The graph re-runs from this point with the same input. + * Submit a message to the current thread. + */ + send(text: string): void { + this.stream.submit({ messages: [{ role: 'human', content: text }] }); + } + + /** + * Branch the conversation from the selected checkpoint. + * After calling setBranch, the next submit will fork from that point. */ - protected onReplay(checkpointId: string): void { - this.stream.setBranch(checkpointId); + selectCheckpoint(state: { checkpoint_id?: string }): void { + if (state.checkpoint_id) { + this.stream.setBranch(state.checkpoint_id); + } } /** - * Fork: sets the branch, then the next submit() creates a new - * conversation branch diverging from this checkpoint. + * Format a checkpoint for display in the sidebar. */ - protected onFork(checkpointId: string): void { - this.stream.setBranch(checkpointId); + formatCheckpoint(state: { checkpoint_id?: string; created_at?: string }): string { + const id = state.checkpoint_id ?? 'unknown'; + const short = id.substring(0, 8); + if (state.created_at) { + const ts = new Date(state.created_at).toLocaleTimeString(); + return `${short}... 
@ ${ts}`; + } + return `${short}...`; } } diff --git a/cockpit/langgraph/time-travel/angular/src/environments/environment.ts b/cockpit/langgraph/time-travel/angular/src/environments/environment.ts index 402a818f9..0593b3c66 100644 --- a/cockpit/langgraph/time-travel/angular/src/environments/environment.ts +++ b/cockpit/langgraph/time-travel/angular/src/environments/environment.ts @@ -1,11 +1,11 @@ /** * Production environment configuration. * - * Points to the LangGraph Cloud deployment managed by LangSmith. - * The assistantId must match the graph name in langgraph.json. + * Uses relative /api URL โ€” Vercel middleware proxies to LangGraph Cloud + * and injects the x-api-key header server-side. */ export const environment = { production: true, - langGraphApiUrl: 'https://time-travel-f206148d75f45e75bf30002e68e1b14d.us.langgraph.app', + langGraphApiUrl: '/api', streamingAssistantId: 'time-travel', }; diff --git a/cockpit/langgraph/time-travel/python/langgraph.json b/cockpit/langgraph/time-travel/python/langgraph.json index 2616ce114..ad5e6ed3e 100644 --- a/cockpit/langgraph/time-travel/python/langgraph.json +++ b/cockpit/langgraph/time-travel/python/langgraph.json @@ -2,6 +2,9 @@ "graphs": { "time-travel": "./src/graph.py:graph" }, - "dependencies": ["."], - "python_version": "3.12" + "dependencies": [ + "." + ], + "python_version": "3.12", + "env": ".env" } diff --git a/docs/superpowers/specs/2026-04-05-cockpit-examples-tier1-customization.md b/docs/superpowers/specs/2026-04-05-cockpit-examples-tier1-customization.md deleted file mode 100644 index c296007c4..000000000 --- a/docs/superpowers/specs/2026-04-05-cockpit-examples-tier1-customization.md +++ /dev/null @@ -1,249 +0,0 @@ -# Cockpit Examples Tier 1 Customization - -**Date:** 2026-04-05 -**Status:** Draft -**Scope:** Customize 8 cockpit Angular examples using existing @cacheplane/chat components. No new library features needed. 
- ---- - -## Overview - -Tier 1 covers examples that work with existing `@cacheplane/chat` and `@cacheplane/stream-resource` APIs. Each gets a capability-specific component that goes beyond the generic `` to showcase what makes that capability unique. - -## Tier Breakdown (for reference) - -- **Tier 1 (this spec):** persistence, deployment-runtime, + 6 deep-agents (planning, filesystem, subagents, memory, skills, sandboxes) -- **Tier 2 (next):** memory (LG), subgraphs, interrupts โ€” need custom sidebar content via template overrides -- **Tier 3 (later):** time-travel, durable-execution โ€” need new library features - -## Example Specifications - -### 1. Persistence (`cockpit/langgraph/persistence/angular`) - -**What it demonstrates:** Thread-based conversation persistence. Resume conversations by thread ID. - -**Component:** Uses `` with thread management inputs. - -```typescript -@Component({ - selector: 'app-persistence', - standalone: true, - imports: [ChatComponent], - template: ` - - `, -}) -export class PersistenceComponent { - protected readonly stream = streamResource({ - apiUrl: environment.langGraphApiUrl, - assistantId: environment.streamingAssistantId, - threadId: null, // auto-create - onThreadId: (id) => this.trackThread(id), - }); - - protected readonly threads = signal([]); - protected readonly activeThreadId = signal(''); - - private trackThread(id: string): void { - this.activeThreadId.set(id); - // Add to thread list if new - if (!this.threads().find(t => t.id === id)) { - this.threads.update(list => [...list, { id }]); - } - } - - protected onThreadSelected(id: string): void { - this.activeThreadId.set(id); - this.stream.switchThread(id); - } -} -``` - -**Key signals:** `stream.switchThread()`, `onThreadId` callback, `[threads]` input on ``. - ---- - -### 2. Deployment Runtime (`cockpit/langgraph/deployment-runtime/angular`) - -**What it demonstrates:** Production deployment config โ€” different assistant ID, environment-based URL. 
- -**Component:** Uses `` with no extras. The differentiation is in `environment.ts` pointing to a deployed endpoint. - -```typescript -@Component({ - selector: 'app-deployment-runtime', - standalone: true, - imports: [ChatComponent], - template: ``, -}) -export class DeploymentRuntimeComponent { - protected readonly stream = streamResource({ - apiUrl: environment.langGraphApiUrl, - assistantId: environment.deploymentRuntimeAssistantId, - }); -} -``` - -Minimal โ€” same as streaming but with deployment-specific environment config. - ---- - -### 3. Planning (`cockpit/deep-agents/planning/angular`) - -**What it demonstrates:** Task decomposition โ€” agent creates a plan with steps, then executes them. - -**Component:** Uses `` plus a computed signal deriving plan steps from `stream.value()`. - -```typescript -@Component({ - selector: 'app-planning', - standalone: true, - imports: [ChatDebugComponent], - template: ` -
- - @if (planSteps().length > 0) { - - } -
- `, -}) -export class PlanningComponent { - protected readonly stream = streamResource({ - apiUrl: environment.langGraphApiUrl, - assistantId: environment.streamingAssistantId, - }); - - protected readonly planSteps = computed(() => { - const val = this.stream.value() as Record; - const plan = val?.['plan']; - return Array.isArray(plan) ? plan : []; - }); -} -``` - ---- - -### 4. Filesystem (`cockpit/deep-agents/filesystem/angular`) - -**What it demonstrates:** File read/write tool calls. - -**Component:** `` plus computed signal extracting tool calls from messages. - -```typescript -// Same pattern as planning but derives file operations from stream.messages() -protected readonly fileOps = computed(() => { - const messages = this.stream.messages(); - const ops: { name: string; path: string; status: string }[] = []; - for (const msg of messages) { - if ('tool_calls' in msg && Array.isArray((msg as any).tool_calls)) { - for (const tc of (msg as any).tool_calls) { - if (tc.name === 'read_file' || tc.name === 'write_file') { - ops.push({ name: tc.name, path: tc.args?.path ?? '', status: 'done' }); - } - } - } - } - return ops; -}); -``` - -Sidebar shows file operation log with read/write icons. - ---- - -### 5. Subagents (`cockpit/deep-agents/subagents/angular`) - -**What it demonstrates:** Orchestrator delegating to specialist subagents. - -**Component:** `` plus computed signal from `stream.subagents()` or tool calls. - -```typescript -protected readonly subagentEntries = computed(() => { - const messages = this.stream.messages(); - const entries: { name: string; status: string }[] = []; - for (const msg of messages) { - if ('tool_calls' in msg && Array.isArray((msg as any).tool_calls)) { - for (const tc of (msg as any).tool_calls) { - if (['research_agent', 'analysis_agent', 'summary_agent'].includes(tc.name)) { - entries.push({ name: tc.name, status: 'done' }); - } - } - } - } - return entries; -}); -``` - -Sidebar shows subagent delegation cards. 
- ---- - -### 6. Memory (`cockpit/deep-agents/memory/angular`) - -**What it demonstrates:** Persistent fact extraction across turns. - -**Component:** `` plus computed signal from `stream.value().agent_memory`. - -```typescript -protected readonly memoryEntries = computed(() => { - const val = this.stream.value() as Record; - const mem = val?.['agent_memory']; - if (!mem || typeof mem !== 'object') return []; - return Object.entries(mem as Record); -}); -``` - -Sidebar shows learned facts as key-value pairs. - ---- - -### 7. Skills (`cockpit/deep-agents/skills/angular`) - -**What it demonstrates:** Multi-skill tool selection (calculator, word_count, summarize). - -**Component:** `` plus computed signal extracting skill invocations from messages. - -Same tool-call extraction pattern as filesystem, filtered for calculator/word_count/summarize. - ---- - -### 8. Sandboxes (`cockpit/deep-agents/sandboxes/angular`) - -**What it demonstrates:** Code execution via `run_code` tool. - -**Component:** `` plus computed signal extracting code execution results. - -Sidebar shows execution logs with exit status badges and stdout output. - ---- - -## Common Pattern - -All 6 deep-agents examples follow the same structure: -1. `` for the main chat + debug panel -2. An optional `