From 90e12c33704a7f0ae77eb701ae13518f9add6592 Mon Sep 17 00:00:00 2001 From: Brian Love Date: Sun, 5 Apr 2026 07:16:23 -0700 Subject: [PATCH 01/13] feat(cockpit): complete cockpit application with 14 capability examples (#14) Cockpit is an integrated developer reference surface for @cacheplane/stream-resource. It embeds running Angular examples, shows syntax-highlighted source code, renders tutorial documentation, and displays auto-generated API references for each capability. Shared libraries: - @cacheplane/design-tokens: colors, glass, gradient, glow, typography tokens - @cacheplane/ui-react: GlassPanel, GlassButton, Callout, Steps, Tabs, Card, CodeGroup, NavLink - @cacheplane/chat: Angular chat component library (cp-chat, cp-chat-message, cp-chat-input) 14 capability examples (Angular + Python + docs + e2e each): - LangGraph: streaming, persistence, interrupts, memory, durable-execution, subgraphs, time-travel, deployment-runtime - Deep Agents: planning, filesystem, subagents, memory, skills, sandboxes Unified harness: - Capability registry as single source of truth - Serve orchestrator with --capability and --all modes - Per-capability Nx serve targets - CI build job for all Angular apps - LangGraph deployment matrix for all 14 backends - Smoke e2e test suite (28 tests: 14 UI render + 14 send/receive) Light glassmorphism theme matching the website design with frosted glass panels, warm-to-cool gradient backgrounds, and EB Garamond/Inter/JetBrains Mono typography. 
From 5fa8e75193ec0317b44af75987d0d6a605fe489e Mon Sep 17 00:00:00 2001 From: Brian Love Date: Tue, 7 Apr 2026 11:44:32 -0700 Subject: [PATCH 02/13] feat: migrate all domains from stream-resource.dev to cacheplane.ai MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - stream-resource.dev → cacheplane.ai - cockpit.stream-resource.dev → cockpit.cacheplane.ai - examples.stream-resource.dev → examples.cacheplane.ai - Updated across all source code, CI workflows, docs, emails, templates - 83 references migrated Co-Authored-By: Claude Opus 4.6 (1M context) --- .claude/launch.json | 6 + .claude/worktrees/blissful-bartik | 1 + .claude/worktrees/optimistic-jang | 1 + .claude/worktrees/website-iteration | 1 + .claude/worktrees/zealous-jones | 1 + AGENTS.md | 79 + README.md | 30 +- apps/cockpit/next-env.d.ts | 2 +- apps/website/src/components/shared/Nav.tsx | 12 +- apps/website/tsconfig.tsbuildinfo | 1 + docs/superpowers/plans/2026-03-19-roadmap.md | 20 +- .../2026-03-19-vercel-deployment-notes.md | 2 +- ...6-03-20-cockpit-phase-1-manifest-and-ia.md | 107 ++ ...-03-20-cockpit-phase-2-harness-and-repo.md | 105 ++ ...026-03-20-cockpit-phase-3-cockpit-shell.md | 100 ++ .../2026-03-20-cockpit-phase-4-docs-system.md | 97 ++ ...ckpit-phase-5-capability-matrix-rollout.md | 113 ++ ...-cockpit-phase-6-testing-and-deployment.md | 100 ++ .../2026-03-20-cockpit-phased-execution.md | 111 ++ .../plans/2026-03-21-agent-first-docs.md | 249 +++ .../plans/2026-03-22-cockpit-ui-refresh.md | 380 +++++ .../plans/2026-04-05-narrative-redesign.md | 1464 +++++++++++++++++ .../plans/2026-04-05-whitepaper-pipeline.md | 746 +++++++++ ...6-04-06-rebrand-angular-stream-resource.md | 142 +- .../2026-03-21-agent-first-docs-design.md | 258 +++ .../2026-03-22-cockpit-ui-refresh-design.md | 241 +++ ...6-04-06-rebrand-angular-stream-resource.md | 54 +- 27 files changed, 4292 insertions(+), 131 deletions(-) create mode 160000 .claude/worktrees/blissful-bartik create mode 
160000 .claude/worktrees/optimistic-jang create mode 160000 .claude/worktrees/website-iteration create mode 160000 .claude/worktrees/zealous-jones create mode 100644 AGENTS.md create mode 100644 apps/website/tsconfig.tsbuildinfo create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-1-manifest-and-ia.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-2-harness-and-repo.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-3-cockpit-shell.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-4-docs-system.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-5-capability-matrix-rollout.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phase-6-testing-and-deployment.md create mode 100644 docs/superpowers/plans/2026-03-20-cockpit-phased-execution.md create mode 100644 docs/superpowers/plans/2026-03-21-agent-first-docs.md create mode 100644 docs/superpowers/plans/2026-03-22-cockpit-ui-refresh.md create mode 100644 docs/superpowers/plans/2026-04-05-narrative-redesign.md create mode 100644 docs/superpowers/plans/2026-04-05-whitepaper-pipeline.md create mode 100644 docs/superpowers/specs/2026-03-21-agent-first-docs-design.md create mode 100644 docs/superpowers/specs/2026-03-22-cockpit-ui-refresh-design.md diff --git a/.claude/launch.json b/.claude/launch.json index 74c73044d..a4e442b7f 100644 --- a/.claude/launch.json +++ b/.claude/launch.json @@ -13,6 +13,12 @@ "runtimeExecutable": "/bin/bash", "runtimeArgs": ["-c", "export PATH=/Users/blove/.nvm/versions/node/v22.14.0/bin:$PATH && npx nx serve cockpit --port 4201"], "port": 4201 + }, + { + "name": "streaming", + "runtimeExecutable": "/bin/bash", + "runtimeArgs": ["-c", "export PATH=/Users/blove/.nvm/versions/node/v22.14.0/bin:$PATH && npx nx serve cockpit-langgraph-streaming-angular --port 4300"], + "port": 4300 } ] } diff --git a/.claude/worktrees/blissful-bartik b/.claude/worktrees/blissful-bartik new file mode 160000 index 
000000000..44e21de99 --- /dev/null +++ b/.claude/worktrees/blissful-bartik @@ -0,0 +1 @@ +Subproject commit 44e21de992ad8c0b01a35d4d71cb41dfeb92cdb8 diff --git a/.claude/worktrees/optimistic-jang b/.claude/worktrees/optimistic-jang new file mode 160000 index 000000000..0a6d25bcb --- /dev/null +++ b/.claude/worktrees/optimistic-jang @@ -0,0 +1 @@ +Subproject commit 0a6d25bcb8d03bd2afe4e5bdd50ee660e6ae4df9 diff --git a/.claude/worktrees/website-iteration b/.claude/worktrees/website-iteration new file mode 160000 index 000000000..510befe14 --- /dev/null +++ b/.claude/worktrees/website-iteration @@ -0,0 +1 @@ +Subproject commit 510befe14ac3482c89f883f52821604f143fa5f2 diff --git a/.claude/worktrees/zealous-jones b/.claude/worktrees/zealous-jones new file mode 160000 index 000000000..413d3fa51 --- /dev/null +++ b/.claude/worktrees/zealous-jones @@ -0,0 +1 @@ +Subproject commit 413d3fa51304735e59092675750c26631dab4ee1 diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..1afcf0505 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,79 @@ +# Contributor Agent Guide + +This file is for agents working in this repository. It is contributor-facing, not consumer-facing. Keep decisions grounded in the actual repo state, and prefer project-specific instructions over generic agent habits. + +## Scope and Precedence + +- This guide is for contributors working in the monorepo. +- Public-facing agent context lives under `apps/website/public/` and exists for package users, docs readers, and external tooling. +- When instructions conflict, prefer the most local project instruction available. +- Treat reference docs and copied workflows as inputs, not as truth. Verify against the codebase before acting. + +## Operating Principles + +- Do not bluff. If something is uncertain, say what is uncertain and verify it from code, docs, or tooling. +- Do the research yourself when the answer can be found locally or by using current documentation. +- Explore first, then edit. 
For non-trivial work, inspect the relevant files and form a plan before changing code. +- Use applicable workflow guidance before defaulting to generic implementation habits. +- Keep communication direct, calm, and specific. Avoid performative certainty, unnecessary filler, or aggressive phrasing. + +## Planning and Execution + +- For simple, localized changes, a brief mental plan is fine. +- For anything that spans multiple files, affects behavior, or has unclear boundaries, inspect the codebase and write out a plan before editing. +- Follow existing patterns unless there is a concrete reason to improve them as part of the task. +- Keep changes scoped to the goal. Do not fold unrelated refactors into the same task unless they are required to make the work coherent. +- If you discover the current approach is wrong, adjust course explicitly instead of forcing the original plan through. + +## Commands and Tooling + +- Prefer setting the tool's working directory over shell patterns like `cd path && command`. In Codex, pass `workdir` directly to the command tool. +- Use the repo's actual package manager and task runner. This repo uses `npm` at the root and `nx` for workspace tasks. +- Prefer `rg` and `rg --files` for search. +- Prefer non-interactive commands. +- Avoid destructive git operations unless explicitly requested. +- Do not substitute other runners when the repo already defines the right command. + +## Repo Layout + +- `libs/stream-resource`: main Angular library. +- `apps/website`: docs and marketing site. +- `packages/mcp`: MCP server package. +- `e2e/stream-resource-e2e`: end-to-end coverage for the workspace. +- `apps/demo` and `apps/demo-e2e`: demo application and related end-to-end coverage. + +## Working in This Repo + +- The workspace is Nx-based. Prefer project-scoped commands over broad workspace runs unless the task actually needs broader verification. +- Inspect `project.json`, `nx.json`, and existing scripts before inventing commands. 
+- If you need Nx-specific syntax or behavior and it is not obvious from local config, verify it from current Nx docs rather than relying on memory. +- Respect generated and public-facing context files. If the task changes docs, API surface, positioning, or package guidance, check whether agent context or docs should be regenerated. + +## Docs and Generated Context + +- Do not commit generated plans, analyses, or reports unless explicitly requested. +- If docs or public agent guidance changes, check whether `npm run generate-agent-context` should be run. +- If API docs or narrative docs are affected, check whether `npm run generate-api-docs`, `npm run generate-narrative-docs`, or `npm run generate-docs` should be run. +- Do not regenerate files blindly. Run the smallest relevant generator for the change. + +## Commits and Review + +- Do not make mid-task commits. Group related finished work into a logical commit. +- Do not add co-author metadata unless explicitly requested. +- Before proposing a commit or claiming the task is done, review the diff and verify the relevant commands were actually run. +- If you use delegated review or sub-agents, verify their output yourself before repeating their conclusions. + +## Verification + +- Do not claim work is complete without fresh verification evidence from relevant commands. +- Verify the smallest relevant surface first, then broaden as needed. +- Prefer repo-native commands. Typical examples in this repo include `npx nx test `, `npx nx lint `, `npx nx build `, and doc generation commands when context files change. +- If a task affects only one project, verify that project first instead of defaulting to the whole workspace. +- Report what you actually verified, and call out anything you could not run. + +## Codex Notes + +- In Codex, prefer `workdir` over `cd`. +- Use `apply_patch` for targeted edits. +- Read the repo state before editing, especially in a dirty worktree. 
+- Do not revert user changes you did not make unless explicitly asked. diff --git a/README.md b/README.md index 0452fc388..322c2009e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@

Angular Agent Framework — The Enterprise Streaming Resource for LangChain and Angular

@@ -11,13 +11,13 @@

- - npm version + + npm version License: PolyForm Noncommercial + Commercial - + Angular 20+ @@ -27,14 +27,14 @@ --- -`agent()` is the Angular equivalent of LangGraph's React `useStream()` hook — a full-parity implementation built on Angular Signals and the Angular Resource API. It gives enterprise Angular teams the same production-grade streaming primitives available to React developers on LangChain, without compromises or workarounds. Drop it into any Angular 20+ component, point it at your LangGraph Platform endpoint, and get reactive, signal-driven access to streaming state, messages, tool calls, interrupts, and thread history. +`streamResource()` is the Angular equivalent of LangGraph's React `useStream()` hook — a full-parity implementation built on Angular Signals and the Angular Resource API. It gives enterprise Angular teams the same production-grade streaming primitives available to React developers on LangChain, without compromises or workarounds. Drop it into any Angular 20+ component, point it at your LangGraph Platform endpoint, and get reactive, signal-driven access to streaming state, messages, tool calls, interrupts, and thread history. 
--- ## Install ```bash -npm install @cacheplane/angular +npm install @cacheplane/stream-resource ``` **Peer dependencies:** `@angular/core ^20.0.0 || ^21.0.0`, `@langchain/core ^1.1.0`, `@langchain/langgraph-sdk ^1.7.0`, `rxjs ~7.8.0` @@ -45,7 +45,7 @@ npm install @cacheplane/angular ```typescript import { Component } from '@angular/core'; -import { agent } from '@cacheplane/angular'; +import { streamResource } from '@cacheplane/stream-resource'; import type { BaseMessage } from '@langchain/core/messages'; @Component({ @@ -65,7 +65,7 @@ import type { BaseMessage } from '@langchain/core/messages'; `, }) export class ChatComponent { - chat = agent<{ messages: BaseMessage[] }>({ + chat = streamResource<{ messages: BaseMessage[] }>({ apiUrl: 'https://your-langgraph-platform.com', assistantId: 'my-agent', messagesKey: 'messages', @@ -83,7 +83,7 @@ That's it. `chat.messages()` is an Angular Signal. Bind it directly in your temp ## Feature Comparison -| Feature | `agent()` (Angular) | `useStream()` (React) | +| Feature | `streamResource()` (Angular) | `useStream()` (React) | |---|---|---| | Streaming state as reactive primitives | Angular Signals | React state | | Messages signal | `messages()` | `messages` | @@ -99,7 +99,7 @@ That's it. `chat.messages()` is an Angular Signal. Bind it directly in your temp | Submit | `submit(values, opts?)` | `submit(values, opts?)` | | Stop | `stop()` | `stop()` | | Reload last submission | `reload()` | — | -| Custom transport (for testing) | `MockAgentTransport` | mock fetch | +| Custom transport (for testing) | `MockStreamTransport` | mock fetch | | Angular `ResourceRef` compatibility | Full duck-type parity | N/A | | Angular 20+ Signals API | Native | N/A | | SSR / Server Components | Client-side only | React Server Components (React) | @@ -111,12 +111,12 @@ That's it. `chat.messages()` is an Angular Signal. Bind it directly in your temp

Angular Agent Framework architecture: Angular Component → agent() → StreamManager Bridge → LangGraph Platform, with signals returned reactively

-`agent()` creates 12 `BehaviorSubject`s at injection-context time — once, at component construction. The `StreamManager` bridge (the only file that touches `@langchain/langgraph-sdk` internals) pushes stream events into those subjects. `toSignal()` converts each subject to an Angular Signal, also at construction time. Dynamic actions (`submit`, `stop`, `switchThread`) push into the existing subjects — no new subjects are ever created after construction. This architecture is required because `toSignal()` must be called in an injection context and cannot be called again later. +`streamResource()` creates 12 `BehaviorSubject`s at injection-context time — once, at component construction. The `StreamManager` bridge (the only file that touches `@langchain/langgraph-sdk` internals) pushes stream events into those subjects. `toSignal()` converts each subject to an Angular Signal, also at construction time. Dynamic actions (`submit`, `stop`, `switchThread`) push into the existing subjects — no new subjects are ever created after construction. This architecture is required because `toSignal()` must be called in an injection context and cannot be called again later. --- @@ -137,7 +137,7 @@ That's it. `chat.messages()` is an Angular Signal. Bind it directly in your temp - [Getting Started](https://cacheplane.ai/docs/getting-started) - [API Reference](https://cacheplane.ai/api-reference) -- [Testing with MockAgentTransport](https://cacheplane.ai/docs/testing) +- [Testing with MockStreamTransport](https://cacheplane.ai/docs/testing) - [Human-in-the-Loop / Interrupts](https://cacheplane.ai/docs/interrupts) - [Subagent Streaming](https://cacheplane.ai/docs/subagents) @@ -145,9 +145,9 @@ That's it. `chat.messages()` is an Angular Signal. 
Bind it directly in your temp ## License -`@cacheplane/angular` is source-available software dual-licensed: +`@cacheplane/stream-resource` is source-available software dual-licensed: - **PolyForm Noncommercial 1.0.0** — free for noncommercial use (personal projects, academic, research, non-profit internal tooling). See [`LICENSE`](./LICENSE). -- **Angular Agent Framework Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). +- **Angular Stream Resource Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). This is **not** an open-source license. Commercial use — including use in a for-profit product, service, or organization — requires a paid commercial license. See [pricing](https://cacheplane.ai/pricing). diff --git a/apps/cockpit/next-env.d.ts b/apps/cockpit/next-env.d.ts index 638811fc2..c4b7818fb 100644 --- a/apps/cockpit/next-env.d.ts +++ b/apps/cockpit/next-env.d.ts @@ -1,6 +1,6 @@ /// /// -import "./../../dist/apps/cockpit/.next/types/routes.d.ts"; +import "./.next/dev/types/routes.d.ts"; // NOTE: This file should not be edited // see https://nextjs.org/docs/app/api-reference/config/typescript for more information. 
diff --git a/apps/website/src/components/shared/Nav.tsx b/apps/website/src/components/shared/Nav.tsx index daffb7965..7af330835 100644 --- a/apps/website/src/components/shared/Nav.tsx +++ b/apps/website/src/components/shared/Nav.tsx @@ -6,7 +6,7 @@ import { tokens } from '@cacheplane/design-tokens'; const links = [ { label: 'Pilot to Prod', href: '/pilot-to-prod', external: false }, { label: 'Docs', href: '/docs', external: false }, - { label: 'API', href: '/docs/agent/api/agent', external: false }, + { label: 'API', href: '/docs/api/stream-resource', external: false }, { label: 'Examples', href: 'https://cockpit.cacheplane.ai', external: true }, { label: 'Pricing', href: '/pricing', external: false }, ]; @@ -50,7 +50,7 @@ export function Nav() { {/* Top bar */} @@ -125,7 +125,7 @@ export function Nav() { ))}
- **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Create the shared cockpit manifest, canonical IDs, language fallback behavior, and initial approved matrix inventory as executable code and content contracts. + +**Architecture:** Build `libs/cockpit-registry` first and make it the single owner of topic/page/language metadata. Add deterministic resolution helpers and initial manifest data for both products. No cockpit UI or docs rendering belongs in this phase. + +**Tech Stack:** Nx, TypeScript, Vitest, JSON or TS data modules + +--- + +### Task 1: Create the registry library + +**Files:** +- Create: `libs/cockpit-registry/project.json` +- Create: `libs/cockpit-registry/package.json` +- Create: `libs/cockpit-registry/tsconfig.json` +- Create: `libs/cockpit-registry/src/index.ts` +- Create: `libs/cockpit-registry/src/lib/manifest.types.ts` +- Create: `libs/cockpit-registry/src/lib/manifest.ts` +- Create: `libs/cockpit-registry/src/lib/manifest.spec.ts` + +- [ ] **Step 1: Write the failing manifest type test** +- [ ] **Step 2: Run the targeted test to verify it fails** +- [ ] **Step 3: Implement the minimal manifest schema types** +- [ ] **Step 4: Add exports** +- [ ] **Step 5: Run the targeted test to verify it passes** + +Run: +- `npx vitest run libs/cockpit-registry/src/lib/manifest.spec.ts` + +Expected: +- initial failure before implementation +- passing after schema is added + +### Task 2: Encode the approved starting inventory + +**Files:** +- Modify: `libs/cockpit-registry/src/lib/manifest.ts` +- Modify: `libs/cockpit-registry/src/lib/manifest.spec.ts` + +- [ ] **Step 1: Write failing tests for the approved Deep Agents and LangGraph inventory** +- [ ] **Step 2: Run the targeted test to verify it fails** +- [ ] **Step 3: Add manifest entries for all approved topics** +- [ ] 
**Step 4: Mark `getting-started / overview` entries as `docs-only`** +- [ ] **Step 5: Run the targeted test to verify it passes** + +Run: +- `npx vitest run libs/cockpit-registry/src/lib/manifest.spec.ts` + +Expected: +- manifest contains all approved topics and languages + +### Task 3: Implement language and fallback resolution + +**Files:** +- Create: `libs/cockpit-registry/src/lib/resolve-language.ts` +- Create: `libs/cockpit-registry/src/lib/resolve-language.spec.ts` +- Modify: `libs/cockpit-registry/src/index.ts` + +- [ ] **Step 1: Write failing tests for equivalent-page and fallback resolution** +- [ ] **Step 2: Run the targeted test to verify it fails** +- [ ] **Step 3: Implement page-equivalent lookup** +- [ ] **Step 4: Implement fallback to product `getting-started / overview`** +- [ ] **Step 5: Run the targeted test to verify it passes** + +Run: +- `npx vitest run libs/cockpit-registry/src/lib/resolve-language.spec.ts` + +Expected: +- deterministic page resolution for matching and missing parity cases + +### Task 4: Add validation helpers for planning and CI use + +**Files:** +- Create: `libs/cockpit-registry/src/lib/validate-manifest.ts` +- Create: `libs/cockpit-registry/src/lib/validate-manifest.spec.ts` +- Modify: `libs/cockpit-registry/src/index.ts` + +- [ ] **Step 1: Write failing tests for invalid duplicate ids and invalid fallback targets** +- [ ] **Step 2: Run the targeted test to verify it fails** +- [ ] **Step 3: Implement validation helpers** +- [ ] **Step 4: Export the helpers** +- [ ] **Step 5: Run the targeted test to verify it passes** + +Run: +- `npx vitest run libs/cockpit-registry/src/lib/validate-manifest.spec.ts` + +Expected: +- manifest validation rejects duplicate or inconsistent entries + +### Task 5: Verify the full phase + +**Files:** +- Verify only + +- [ ] **Step 1: Run the registry test suite** +- [ ] **Step 2: Run any lint/build target added for the new library** +- [ ] **Step 3: Commit** + +Run: +- `npx nx test 
cockpit-registry --skip-nx-cache` +- `npx nx lint cockpit-registry --skip-nx-cache` + +Expected: +- both pass diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phase-2-harness-and-repo.md b/docs/superpowers/plans/2026-03-20-cockpit-phase-2-harness-and-repo.md new file mode 100644 index 000000000..e01938af2 --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phase-2-harness-and-repo.md @@ -0,0 +1,105 @@ +# Cockpit Phase 2 Harness And Repo Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Establish the product-first repo structure, Nx project boundaries, shared cockpit libraries, and scaffolding path for capability modules. + +**Architecture:** Create the foundational repo units without building the full cockpit UI. This phase locks the filesystem layout, Nx project registration, and generated scaffolding conventions that later phases depend on. 
+ +**Tech Stack:** Nx, TypeScript, Next.js config, Python run-commands targets + +--- + +### Task 1: Create the cockpit app skeleton + +**Files:** +- Create: `apps/cockpit/project.json` +- Create: `apps/cockpit/package.json` +- Create: `apps/cockpit/tsconfig.json` +- Create: `apps/cockpit/next.config.ts` +- Create: `apps/cockpit/src/app/layout.tsx` +- Create: `apps/cockpit/src/app/page.tsx` + +- [ ] **Step 1: Create a failing build by registering the app without implementation** +- [ ] **Step 2: Run the cockpit build to confirm failure** +- [ ] **Step 3: Add the minimal Next.js cockpit shell skeleton** +- [ ] **Step 4: Re-run the cockpit build** +- [ ] **Step 5: Commit** + +Run: +- `npx nx build cockpit --skip-nx-cache` + +Expected: +- initial failure, then successful minimal build + +### Task 2: Create shared cockpit library skeletons + +**Files:** +- Create: `libs/cockpit-shell/**` +- Create: `libs/cockpit-ui/**` +- Create: `libs/cockpit-docs/**` +- Create: `libs/cockpit-testing/**` + +- [ ] **Step 1: Create minimal project files for each shared library** +- [ ] **Step 2: Add placeholder exports aligned to the spec boundaries** +- [ ] **Step 3: Add lightweight build or test targets as appropriate** +- [ ] **Step 4: Verify Nx can target the new libraries** + +Run: +- `npx nx graph --file tmp/cockpit-graph.html` +- `npx nx show project cockpit-shell` + +Expected: +- new libraries appear as valid Nx projects + +### Task 3: Create the product-first capability directory conventions + +**Files:** +- Create: `cockpit/deep-agents/.gitkeep` +- Create: `cockpit/langgraph/.gitkeep` +- Create: `cockpit/README.md` +- Create: `tools/generators/cockpit-capability/**` if generation is implemented now + +- [ ] **Step 1: Add the top-level product-first directories** +- [ ] **Step 2: Document the expected module shape** +- [ ] **Step 3: If generation is implemented now, scaffold one dry-run capability template** +- [ ] **Step 4: Verify the documented shape matches Phase 1 manifest 
identity** + +Run: +- `rg -n "product-first|topic|page|language" cockpit/README.md` + +Expected: +- directory contract is explicit and aligned with the spec + +### Task 4: Add one end-to-end scaffolding path + +**Files:** +- Create: representative sample under `cockpit/langgraph/streaming/python/**` +- Modify: any generator/template files created in Task 3 + +- [ ] **Step 1: Create one minimal representative capability module** +- [ ] **Step 2: Verify it can be targeted by Nx if appropriate** +- [ ] **Step 3: Verify metadata binding to `libs/cockpit-registry`** +- [ ] **Step 4: Commit** + +Run: +- `npx nx show projects` + +Expected: +- one representative module exists and proves the architecture is real + +### Task 5: Verify the full phase + +**Files:** +- Verify only + +- [ ] **Step 1: Build the cockpit app** +- [ ] **Step 2: Verify the new libraries are discoverable** +- [ ] **Step 3: Commit** + +Run: +- `npx nx build cockpit --skip-nx-cache` +- `npx nx show projects | rg "cockpit"` + +Expected: +- cockpit app and shared libs are registered and buildable diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phase-3-cockpit-shell.md b/docs/superpowers/plans/2026-03-20-cockpit-phase-3-cockpit-shell.md new file mode 100644 index 000000000..1e202a53f --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phase-3-cockpit-shell.md @@ -0,0 +1,100 @@ +# Cockpit Phase 3 Cockpit Shell Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build the cockpit shell, thin adapter contract, navigation tree, language switching, and core code/prompt/docs panes. + +**Architecture:** Implement the shell against the Phase 1 manifest and Phase 2 repo boundaries. Do not fill the full matrix yet; use a small representative set while proving the shell contract. 
+ +**Tech Stack:** Next.js, React, TypeScript, Nx, Playwright or Vitest where appropriate + +--- + +### Task 1: Implement the shell contract in code + +**Files:** +- Create: `libs/cockpit-shell/src/lib/capability-contract.ts` +- Create: `libs/cockpit-shell/src/lib/capability-contract.spec.ts` +- Modify: `libs/cockpit-shell/src/index.ts` + +- [ ] **Step 1: Write the failing contract tests** +- [ ] **Step 2: Run them to confirm failure** +- [ ] **Step 3: Implement the thin shell contract** +- [ ] **Step 4: Re-run the tests** + +Run: +- `npx vitest run libs/cockpit-shell/src/lib/capability-contract.spec.ts` + +Expected: +- docs-only entries and runtime entries are distinguished correctly + +### Task 2: Build navigation and route resolution + +**Files:** +- Create: `apps/cockpit/src/components/navigation/**` +- Create: `apps/cockpit/src/lib/route-resolution.ts` +- Create: `apps/cockpit/src/lib/route-resolution.spec.ts` +- Modify: `apps/cockpit/src/app/page.tsx` + +- [ ] **Step 1: Write failing tests for manifest-driven route resolution** +- [ ] **Step 2: Run them to confirm failure** +- [ ] **Step 3: Implement product/section/topic/page navigation** +- [ ] **Step 4: Re-run the tests** + +Run: +- `npx vitest run apps/cockpit/src/lib/route-resolution.spec.ts` + +Expected: +- shell routes resolve from manifest metadata only + +### Task 3: Implement language switching and fallback + +**Files:** +- Create: `apps/cockpit/src/components/language-switcher.tsx` +- Create: `apps/cockpit/src/components/language-switcher.spec.tsx` +- Modify: relevant shell pages/components + +- [ ] **Step 1: Write failing UI tests for equivalent-page switching** +- [ ] **Step 2: Run them to confirm failure** +- [ ] **Step 3: Implement the switcher using Phase 1 resolution helpers** +- [ ] **Step 4: Re-run the tests** + +Run: +- `npx vitest run apps/cockpit/src/components/language-switcher.spec.tsx` + +Expected: +- switcher lands on equivalent pages or correct overview fallback + +### Task 4: 
Implement shell panes + +**Files:** +- Create: `apps/cockpit/src/components/code-pane/**` +- Create: `apps/cockpit/src/components/prompt-pane/**` +- Create: `apps/cockpit/src/components/docs-pane/**` +- Modify: `apps/cockpit/src/app/**` + +- [ ] **Step 1: Add failing tests for pane rendering from metadata** +- [ ] **Step 2: Implement minimal code/prompt/docs panes** +- [ ] **Step 3: Re-run the tests** + +Run: +- `npx vitest run apps/cockpit/src/components --passWithNoTests=false` + +Expected: +- shell displays metadata-driven panes for representative modules + +### Task 5: Verify the representative shell + +**Files:** +- Verify only + +- [ ] **Step 1: Build the cockpit** +- [ ] **Step 2: Run shell tests** +- [ ] **Step 3: Add a commit** + +Run: +- `npx nx build cockpit --skip-nx-cache` +- `npx nx test cockpit --skip-nx-cache` + +Expected: +- cockpit shell is buildable and navigable with representative content diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phase-4-docs-system.md b/docs/superpowers/plans/2026-03-20-cockpit-phase-4-docs-system.md new file mode 100644 index 000000000..376abb610 --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phase-4-docs-system.md @@ -0,0 +1,97 @@ +# Cockpit Phase 4 Docs System Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build the docs bundle system, shared metadata-driven linking between the website and cockpit, and the product-first docs IA for Deep Agents and LangGraph. + +**Architecture:** Extend the existing website docs surface to consume cockpit manifest metadata. Keep authored docs in the website content system while binding them to capability topics and page ids through shared registry metadata. 
+ +**Tech Stack:** Next.js, MDX/content files, TypeScript, Nx + +--- + +### Task 1: Create docs bundle resolution helpers + +**Files:** +- Create: `libs/cockpit-docs/src/lib/docs-bundle.ts` +- Create: `libs/cockpit-docs/src/lib/docs-bundle.spec.ts` +- Modify: `libs/cockpit-docs/src/index.ts` + +- [ ] **Step 1: Write failing tests for docs bundle lookup by topic/page/language** +- [ ] **Step 2: Run them to confirm failure** +- [ ] **Step 3: Implement the minimal docs bundle resolution** +- [ ] **Step 4: Re-run the tests** + +Run: +- `npx vitest run libs/cockpit-docs/src/lib/docs-bundle.spec.ts` + +Expected: +- docs entries resolve from shared manifest metadata + +### Task 2: Create website content structure for the new taxonomy + +**Files:** +- Create: `apps/website/content/docs/deep-agents/**` +- Create: `apps/website/content/docs/langgraph/**` +- Create: representative overview/build/prompts/code/testing docs files + +- [ ] **Step 1: Add the new product-first docs directories** +- [ ] **Step 2: Create representative content files for one Deep Agents topic and one LangGraph topic** +- [ ] **Step 3: Verify content names align with Phase 1 page ids** + +Run: +- `find apps/website/content/docs/deep-agents -maxdepth 4 -type f | sort` +- `find apps/website/content/docs/langgraph -maxdepth 4 -type f | sort` + +Expected: +- docs tree matches the approved taxonomy + +### Task 3: Bind website routes to the shared docs model + +**Files:** +- Modify: `apps/website/src/lib/docs.ts` +- Modify: `apps/website/src/app/docs/[[...slug]]/page.tsx` +- Create: tests around route mapping if missing + +- [ ] **Step 1: Write a failing route-resolution test** +- [ ] **Step 2: Run it to confirm failure** +- [ ] **Step 3: Implement docs routing against cockpit metadata** +- [ ] **Step 4: Re-run the test** + +Run: +- `npx nx test website --skip-nx-cache` if a website test target exists +- otherwise run the narrow test command introduced for this work + +Expected: +- website docs route by 
shared topic/page/language identity + +### Task 4: Add website-to-cockpit linking primitives + +**Files:** +- Create: `apps/website/src/components/docs/open-in-cockpit.tsx` +- Modify: docs rendering components as needed + +- [ ] **Step 1: Add failing component tests or route assertions** +- [ ] **Step 2: Implement “open in cockpit” and language-aware cross-links** +- [ ] **Step 3: Re-run the tests** + +Run: +- targeted component or route tests + +Expected: +- website docs link into cockpit views without hardcoded per-page logic + +### Task 5: Verify the docs phase + +**Files:** +- Verify only + +- [ ] **Step 1: Build the website** +- [ ] **Step 2: Verify representative docs routes** +- [ ] **Step 3: Commit** + +Run: +- `npx nx build website --skip-nx-cache` + +Expected: +- website builds with new docs structure and shared bindings diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phase-5-capability-matrix-rollout.md b/docs/superpowers/plans/2026-03-20-cockpit-phase-5-capability-matrix-rollout.md new file mode 100644 index 000000000..6cc7ccb85 --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phase-5-capability-matrix-rollout.md @@ -0,0 +1,113 @@ +# Cockpit Phase 5 Capability Matrix Rollout Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Implement the full approved Deep Agents and LangGraph capability matrix in the harness and docs system, Python-first and TypeScript where support is strong enough. + +**Architecture:** Roll out capability modules topic-by-topic using the contracts and generators established in earlier phases. Each topic is incomplete until its cockpit module, docs bundle, prompts, code mapping, and tests exist together. 
+ +**Tech Stack:** Nx, TypeScript, Python, Next.js, pytest, Vitest + +--- + +## Rollout Batches + +- Batch A: product overviews and getting started +- Batch B: Deep Agents core capabilities +- Batch C: LangGraph core capabilities +- Batch D: TypeScript parity candidates + +--- + +### Task 1: Implement Batch A + +**Files:** +- Create/Modify: manifest entries already approved in Phase 1 +- Create: docs content for both product overviews +- Create: any docs-only cockpit bindings required + +- [ ] **Step 1: Add overview docs for Deep Agents and LangGraph** +- [ ] **Step 2: Verify language fallback for overview pages** +- [ ] **Step 3: Commit** + +Run: +- representative docs route checks + +Expected: +- both product overviews exist and are reachable from the shell and website + +### Task 2: Implement Deep Agents capability modules + +**Files:** +- Create: `cockpit/deep-agents/planning/python/**` +- Create: `cockpit/deep-agents/filesystem/python/**` +- Create: `cockpit/deep-agents/subagents/python/**` +- Create: `cockpit/deep-agents/memory/python/**` +- Create: `cockpit/deep-agents/skills/python/**` +- Create: `cockpit/deep-agents/sandboxes/python/**` +- Create matching docs bundles and tests + +- [ ] **Step 1: For each topic, scaffold the module** +- [ ] **Step 2: Add prompts, code mappings, and docs bundle entries** +- [ ] **Step 3: Add smoke tests** +- [ ] **Step 4: Verify each topic before moving on** + +Run: +- topic-level smoke targets for each implemented topic + +Expected: +- every approved Deep Agents topic has a canonical Python reference module + +### Task 3: Implement LangGraph capability modules + +**Files:** +- Create: `cockpit/langgraph/persistence/python/**` +- Create: `cockpit/langgraph/durable-execution/python/**` +- Create: `cockpit/langgraph/streaming/python/**` +- Create: `cockpit/langgraph/interrupts/python/**` +- Create: `cockpit/langgraph/memory/python/**` +- Create: `cockpit/langgraph/subgraphs/python/**` +- Create: 
`cockpit/langgraph/time-travel/python/**` +- Create: `cockpit/langgraph/deployment-runtime/python/**` +- Create matching docs bundles and tests + +- [ ] **Step 1: For each topic, scaffold the module** +- [ ] **Step 2: Add prompts, code mappings, and docs bundle entries** +- [ ] **Step 3: Add smoke tests** +- [ ] **Step 4: Verify each topic before moving on** + +Run: +- topic-level smoke targets for each implemented topic + +Expected: +- every approved LangGraph topic has a canonical Python reference module + +### Task 4: Add TypeScript parity candidates + +**Files:** +- Create only where the official docs/support are strong enough + +- [ ] **Step 1: Identify parity candidates from the approved matrix** +- [ ] **Step 2: Add TypeScript modules and docs where justified** +- [ ] **Step 3: Verify switcher equivalence or fallback behavior** + +Run: +- representative language-switch tests + +Expected: +- TypeScript pages exist only where they can be defended as real references + +### Task 5: Verify matrix completeness + +**Files:** +- Verify manifest, docs, and tests + +- [ ] **Step 1: Validate the manifest against the approved inventory** +- [ ] **Step 2: Verify no approved topic is missing required assets** +- [ ] **Step 3: Commit** + +Run: +- a manifest completeness check command added in Phase 1 or Phase 6 + +Expected: +- approved matrix inventory is fully represented and status-tracked diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phase-6-testing-and-deployment.md b/docs/superpowers/plans/2026-03-20-cockpit-phase-6-testing-and-deployment.md new file mode 100644 index 000000000..9239eed50 --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phase-6-testing-and-deployment.md @@ -0,0 +1,100 @@ +# Cockpit Phase 6 Testing And Deployment Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. 
Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Align the cockpit and capability matrix with a production-grade testing and deployment model, including CI, cockpit e2e, capability smoke/integration registration, and deploy smoke for `cockpit.cacheplane.ai`. + +**Architecture:** Extend the existing repo testing strategy instead of creating a one-off harness flow. Capability-level smoke and integration remain separate, and cockpit-level verification proves the integrated shell, routing, language fallback, and representative capability loading. + +**Tech Stack:** Nx, Playwright, Vitest, pytest, GitHub Actions, Vercel + +--- + +### Task 1: Register capability testing contracts + +**Files:** +- Modify: `libs/cockpit-registry/**` +- Modify: capability module metadata as needed + +- [ ] **Step 1: Add failing tests for missing smoke or environment declarations** +- [ ] **Step 2: Run them to confirm failure** +- [ ] **Step 3: Extend the manifest schema to require testing contract fields** +- [ ] **Step 4: Re-run the tests** + +Run: +- registry validation tests + +Expected: +- capabilities cannot exist without explicit testing metadata + +### Task 2: Add cockpit-level e2e + +**Files:** +- Create: `apps/cockpit/e2e/**` +- Create or modify: `apps/cockpit/playwright.config.ts` +- Modify: `apps/cockpit/project.json` + +- [ ] **Step 1: Write failing e2e tests for tree navigation, language switching, and representative capability loading** +- [ ] **Step 2: Run the e2e suite to confirm failure** +- [ ] **Step 3: Implement the minimal shell wiring or test harness support** +- [ ] **Step 4: Re-run the e2e suite** + +Run: +- `npx nx e2e cockpit --skip-nx-cache` + +Expected: +- cockpit e2e covers the shell contract rather than all business logic + +### Task 3: Wire CI for capability smoke and cockpit integration + +**Files:** +- Modify: `.github/workflows/ci.yml` +- Modify: `.github/workflows/e2e.yml` +- Modify: `.github/workflows/publish.yml` if the cockpit has 
release-facing artifact checks + +- [ ] **Step 1: Add CI jobs for cockpit build/test/e2e** +- [ ] **Step 2: Add representative capability smoke lanes** +- [ ] **Step 3: Keep secret-backed integration explicit and separate** +- [ ] **Step 4: Verify every referenced command is locally runnable** + +Run: +- local command matrix matching the new workflow steps + +Expected: +- CI catches cockpit and representative capability regressions before merge + +### Task 4: Add deployment and post-deploy smoke + +**Files:** +- Modify: cockpit deployment workflow or Vercel configuration as needed +- Create: deploy-smoke helper script if needed + +- [ ] **Step 1: Define critical cockpit deploy routes** +- [ ] **Step 2: Add post-deploy smoke against `cockpit.cacheplane.ai` or the deployment preview URL** +- [ ] **Step 3: Fail the deploy flow on cockpit health regressions** + +Run: +- local dry-run of the deploy smoke script against a known-good URL + +Expected: +- deployed cockpit health is verified, not assumed + +### Task 5: Verify the final phase + +**Files:** +- Verify only + +- [ ] **Step 1: Run representative capability smoke targets** +- [ ] **Step 2: Run cockpit build, test, and e2e** +- [ ] **Step 3: Run website build if docs integration changed** +- [ ] **Step 4: Commit** + +Run: +- `npx nx build cockpit --skip-nx-cache` +- `npx nx test cockpit --skip-nx-cache` +- `npx nx e2e cockpit --skip-nx-cache` +- representative capability smoke commands +- `npx nx build website --skip-nx-cache` + +Expected: +- cockpit platform and representative capability surfaces are release-verifiable diff --git a/docs/superpowers/plans/2026-03-20-cockpit-phased-execution.md b/docs/superpowers/plans/2026-03-20-cockpit-phased-execution.md new file mode 100644 index 000000000..fc9d4a655 --- /dev/null +++ b/docs/superpowers/plans/2026-03-20-cockpit-phased-execution.md @@ -0,0 +1,111 @@ +# Cockpit Phased Execution Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use 
superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build the full cockpit platform, capability harness, docs system, and deployment/testing model for Deep Agents and LangGraph developer-reference demos. + +**Architecture:** Execute in six ordered phases. Phase 1 locks the shared manifest and IA contract. Phase 2 builds the harness and repo structure. Phase 3 builds the cockpit shell. Phase 4 builds the docs system. Phase 5 fills the full capability matrix. Phase 6 hardens testing and deployment. Earlier phases define contracts that later phases consume. + +**Tech Stack:** Nx, Next.js, TypeScript, Python, Playwright, Vitest, pytest, GitHub Actions, Vercel + +--- + +## Compact Context + +- Top-level surfaces: + - `apps/website` + - `apps/cockpit` +- Product-first capability tree: + - `cockpit/deep-agents//` + - `cockpit/langgraph//` +- Shared library families: + - `libs/cockpit-registry` + - `libs/cockpit-shell` + - `libs/cockpit-ui` + - `libs/cockpit-docs` + - `libs/cockpit-testing` +- Canonical registry identity: + - `product / section / topic / page / language` +- Canonical page ids: + - `overview` + - `build` + - `prompts` + - `code` + - `testing` +- Entry kinds: + - `docs-only` + - `capability` +- Runtime classes: + - `docs-only` + - `browser` + - `local-service` + - `secret-gated` + - `deployed-service` +- Approved starting inventory: + - Deep Agents: `overview`, `planning`, `filesystem`, `subagents`, `memory`, `skills`, `sandboxes` + - LangGraph: `overview`, `persistence`, `durable-execution`, `streaming`, `interrupts`, `memory`, `subgraphs`, `time-travel`, `deployment-runtime` + +--- + +## Phase Order + +1. Phase 1: Manifest and IA foundation +2. Phase 2: Harness and repo architecture +3. Phase 3: Cockpit shell and adapter contract +4. Phase 4: Docs system and website integration +5. Phase 5: Capability matrix rollout +6. 
Phase 6: Testing and deployment hardening + +--- + +## Worktree Map + +- `.worktrees/cockpit-phase-1-manifest` +- `.worktrees/cockpit-phase-2-harness` +- `.worktrees/cockpit-phase-3-shell` +- `.worktrees/cockpit-phase-4-docs` +- `.worktrees/cockpit-phase-5-matrix` +- `.worktrees/cockpit-phase-6-testing` + +Worktree ownership must remain disjoint wherever possible. Phase 5 depends on Phase 1-4 contracts and may need multiple sub-worktrees later, but not before the platform slices are merged. + +--- + +## Phase Plan Files + +- [2026-03-20-cockpit-phase-1-manifest-and-ia.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-1-manifest-and-ia.md) +- [2026-03-20-cockpit-phase-2-harness-and-repo.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-2-harness-and-repo.md) +- [2026-03-20-cockpit-phase-3-cockpit-shell.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-3-cockpit-shell.md) +- [2026-03-20-cockpit-phase-4-docs-system.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-4-docs-system.md) +- [2026-03-20-cockpit-phase-5-capability-matrix-rollout.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-5-capability-matrix-rollout.md) +- [2026-03-20-cockpit-phase-6-testing-and-deployment.md](/Users/blove/repos/stream-resource/docs/superpowers/plans/2026-03-20-cockpit-phase-6-testing-and-deployment.md) + +--- + +## Execution Gates + +- [ ] Phase 1 must land before any shell/docs/matrix work. +- [ ] Phase 2 must land before capability projects are scaffolded in bulk. +- [ ] Phase 3 and Phase 4 may proceed in parallel after Phase 1 and Phase 2. +- [ ] Phase 5 starts only after the shell, docs metadata contract, and harness generators/structure are in place. +- [ ] Phase 6 runs after at least one representative capability per product is integrated into the cockpit. 
+ +--- + +## Final Integration Matrix + +Minimum end-state verification: + +- `npx nx build cockpit --skip-nx-cache` +- `npx nx test cockpit --skip-nx-cache` +- `npx nx e2e cockpit --skip-nx-cache` +- `npx nx build website --skip-nx-cache` +- representative Python smoke targets for Deep Agents and LangGraph +- representative TypeScript capability smoke targets where implemented +- post-deploy smoke for `cockpit.cacheplane.ai` + +--- + +## Final Step + +- [ ] After all six phases complete and verification passes, run final review and integrate work into the approved branch strategy in effect at that time. diff --git a/docs/superpowers/plans/2026-03-21-agent-first-docs.md b/docs/superpowers/plans/2026-03-21-agent-first-docs.md new file mode 100644 index 000000000..8acd878ea --- /dev/null +++ b/docs/superpowers/plans/2026-03-21-agent-first-docs.md @@ -0,0 +1,249 @@ +# Agent-First Docs Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build a one-time high-quality docs generation system that produces agent-first Markdown documentation from code and examples, then supports long-lived contributor iteration with advisory drift reporting. + +**Architecture:** The docs system will extract structured source-of-truth artifacts from code, examples, tests, prompts, and manifests; synthesize excellent first-pass developer documentation; materialize that output into Markdown; and provide drift warnings rather than CI gates. The implementation is split into extraction, synthesis, Markdown generation, validation/reporting, and docs-surface integration. 
+ +**Tech Stack:** Nx, TypeScript, Next.js, Markdown/MDX, cockpit manifest metadata, Node scripts + +--- + +## File Map + +- Create: `libs/docs-source/**` + Extract source-of-truth artifacts from code, examples, tests, prompts, and manifests. +- Create: `libs/docs-synthesis/**` + Assemble page models and narrative synthesis inputs. +- Create: `libs/docs-generation/**` + Materialize generated Markdown and frontmatter into the docs tree. +- Create: `libs/docs-drift/**` + Compare generated expectations against maintained Markdown and emit advisory warnings/prompts. +- Modify: `apps/website/content/docs/**` + Receive generated Markdown baseline. +- Modify: `apps/website/src/lib/docs.ts` + Read generated metadata and content consistently. +- Modify: `apps/cockpit/**` only if UI integration needs richer source mappings. +- Create: `tools/docs/**` + Entry scripts for generation, validation, and drift reporting. + +This plan intentionally separates extraction, synthesis, generation, and drift reporting so each part can be tested independently. + +## Ownership Rules + +- Generated Markdown is written into `apps/website/content/docs/**` for the initial pass. +- Generator-owned metadata blocks and sections must be clearly marked. +- Post-generation contributor edits are allowed. +- The generator must not blindly overwrite maintained Markdown during ordinary iteration. +- Drift reporting compares current source-of-truth expectations to maintained Markdown and emits warnings/prompts rather than forcing regeneration. + +## Narrative Rules + +For the initial pass, narrative sections such as: + +- why this example exists +- architecture +- common failure modes +- related examples + +are generated from structured heuristics based on: + +- manifest metadata +- prompt assets +- verification targets +- commands +- code maps +- dependency relationships + +These sections are expected to be strong but not perfect. They become editable Markdown after generation. 
+ +## First-Pass Scope + +The first pass must cover all docs families coherently, but implementation should prioritize sources that already have structured metadata and good extraction surfaces: + +- cockpit capability docs +- Deep Agents capability docs +- LangGraph capability docs +- StreamResource library docs where current examples, prompts, tests, and code maps are sufficient + +If a docs family has weaker source data, generate the best viable baseline rather than inventing unsupported claims. + +--- + +### Task 1: Build the source extraction layer + +**Files:** +- Create: `libs/docs-source/project.json` +- Create: `libs/docs-source/package.json` +- Create: `libs/docs-source/tsconfig.json` +- Create: `libs/docs-source/src/index.ts` +- Create: `libs/docs-source/src/lib/extract-manifest.ts` +- Create: `libs/docs-source/src/lib/extract-code-map.ts` +- Create: `libs/docs-source/src/lib/extract-prompts.ts` +- Create: `libs/docs-source/src/lib/extract-verification.ts` +- Create: `libs/docs-source/src/lib/docs-source.spec.ts` + +- [ ] **Step 1: Write failing tests for manifest, code-map, prompt, and verification extraction** +- [ ] **Step 2: Run the targeted tests to verify they fail** +- [ ] **Step 3: Implement minimal extractors against existing cockpit and website metadata** +- [ ] **Step 3: Implement minimal extractors against existing cockpit, docs, prompt, and verification metadata** +- [ ] **Step 4: Export the extraction APIs** +- [ ] **Step 5: Re-run the targeted tests** +- [ ] **Step 6: Commit** + +Run: +- `npx vitest run libs/docs-source/src/lib/docs-source.spec.ts` + +Expected: +- tests fail before implementation and pass after extractors are added + +--- + +### Task 2: Build the page synthesis layer + +**Files:** +- Create: `libs/docs-synthesis/project.json` +- Create: `libs/docs-synthesis/package.json` +- Create: `libs/docs-synthesis/tsconfig.json` +- Create: `libs/docs-synthesis/src/index.ts` +- Create: `libs/docs-synthesis/src/lib/page-model.ts` +- 
Create: `libs/docs-synthesis/src/lib/synthesize-page.ts` +- Create: `libs/docs-synthesis/src/lib/synthesize-page.spec.ts` + +- [ ] **Step 1: Write failing tests for the standard page model** +- [ ] **Step 2: Run the targeted tests to verify they fail** +- [ ] **Step 3: Implement synthesis for the standard sections** +- [ ] **Step 4: Ensure the output includes frontend, backend, prompts, verification, and failure modes** +- [ ] **Step 5: Re-run the targeted tests** +- [ ] **Step 6: Commit** + +Run: +- `npx vitest run libs/docs-synthesis/src/lib/synthesize-page.spec.ts` + +Expected: +- a synthesized page model exists for capability docs and library docs, including generated narrative sections + +--- + +### Task 3: Materialize generated Markdown + +**Files:** +- Create: `libs/docs-generation/project.json` +- Create: `libs/docs-generation/package.json` +- Create: `libs/docs-generation/tsconfig.json` +- Create: `libs/docs-generation/src/index.ts` +- Create: `libs/docs-generation/src/lib/render-markdown.ts` +- Create: `libs/docs-generation/src/lib/render-markdown.spec.ts` +- Create: `tools/docs/generate-docs.ts` +- Modify: `apps/website/content/docs/**` + +- [ ] **Step 1: Write failing tests for Markdown rendering with stable frontmatter/metadata** +- [ ] **Step 2: Run the targeted tests to verify they fail** +- [ ] **Step 3: Implement Markdown rendering from the synthesized page model** +- [ ] **Step 4: Add the generation script** +- [ ] **Step 5: Materialize representative docs into the website docs tree** +- [ ] **Step 6: Re-run the targeted tests** +- [ ] **Step 7: Commit** + +Run: +- `npx vitest run libs/docs-generation/src/lib/render-markdown.spec.ts` +- `npx tsx tools/docs/generate-docs.ts --check` + +Expected: +- generated Markdown is deterministic and suitable for long-lived editing + +--- + +### Task 4: Integrate generated docs with the existing docs surfaces + +**Files:** +- Modify: `apps/website/src/lib/docs.ts` +- Modify: 
`apps/website/src/app/docs/[[...slug]]/page.tsx` +- Modify: any docs-routing helpers that need to consume generated metadata +- Create/Modify: tests for generated-doc resolution + +- [ ] **Step 1: Write failing tests for generated-doc discovery in the website docs layer** +- [ ] **Step 2: Run the targeted tests to verify they fail** +- [ ] **Step 3: Update the docs reader to consume generated Markdown and metadata consistently** +- [ ] **Step 4: Re-run the targeted tests** +- [ ] **Step 5: Build the website** +- [ ] **Step 6: Commit** + +Run: +- targeted docs tests introduced in this task +- `NX_DAEMON=false npx nx build website --skip-nx-cache` + +Expected: +- website docs resolve generated content correctly + +--- + +### Task 5: Add advisory drift reporting + +**Files:** +- Create: `libs/docs-drift/project.json` +- Create: `libs/docs-drift/package.json` +- Create: `libs/docs-drift/tsconfig.json` +- Create: `libs/docs-drift/src/index.ts` +- Create: `libs/docs-drift/src/lib/detect-drift.ts` +- Create: `libs/docs-drift/src/lib/detect-drift.spec.ts` +- Create: `tools/docs/report-drift.ts` + +- [ ] **Step 1: Write failing tests for stale-doc detection** +- [ ] **Step 2: Run the targeted tests to verify they fail** +- [ ] **Step 3: Implement drift detection against selected source-of-truth fields** +- [ ] **Step 4: Emit warning output and suggested prompt/update brief instead of exit failures** +- [ ] **Step 5: Re-run the targeted tests** +- [ ] **Step 6: Commit** + +Run: +- `npx vitest run libs/docs-drift/src/lib/detect-drift.spec.ts` +- `npx tsx tools/docs/report-drift.ts` + +Expected: +- drift is reported clearly without gating documentation updates + +--- + +### Task 6: Generate the first full docs baseline + +**Files:** +- Modify: `apps/website/content/docs/**` +- Modify: any generation config or templates needed + +- [ ] **Step 1: Run the full docs generation pipeline against the current codebase** +- [ ] **Step 2: Review representative outputs across 
StreamResource, Deep Agents, LangGraph, and cockpit-linked docs** +- [ ] **Step 3: Fix the highest-signal generation defects** +- [ ] **Step 4: Re-run generation** +- [ ] **Step 5: Build the website and verify representative routes** +- [ ] **Step 6: Commit** + +Run: +- `npx tsx tools/docs/generate-docs.ts` +- `NX_DAEMON=false npx nx build website --skip-nx-cache` + +Expected: +- the repository contains a strong first-pass Markdown docs baseline generated from source-of-truth artifacts + +--- + +### Task 7: Final verification + +**Files:** +- Verify only + +- [ ] **Step 1: Run all docs-system test suites** +- [ ] **Step 2: Run generation and drift reporting** +- [ ] **Step 3: Build the website** +- [ ] **Step 4: Review representative docs routes manually or via targeted checks** +- [ ] **Step 5: Commit** + +Run: +- `npx vitest run libs/docs-source/src/lib/docs-source.spec.ts libs/docs-synthesis/src/lib/synthesize-page.spec.ts libs/docs-generation/src/lib/render-markdown.spec.ts libs/docs-drift/src/lib/detect-drift.spec.ts` +- `npx tsx tools/docs/generate-docs.ts --check` +- `npx tsx tools/docs/report-drift.ts` +- `NX_DAEMON=false npx nx build website --skip-nx-cache` + +Expected: +- docs generation, docs validation, and website build all pass diff --git a/docs/superpowers/plans/2026-03-22-cockpit-ui-refresh.md b/docs/superpowers/plans/2026-03-22-cockpit-ui-refresh.md new file mode 100644 index 000000000..b9c8dfb4d --- /dev/null +++ b/docs/superpowers/plans/2026-03-22-cockpit-ui-refresh.md @@ -0,0 +1,380 @@ +# Cockpit UI Refresh Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. 
+ +**Goal:** Implement the approved cockpit UI refresh so the shell becomes a full-height, website-aligned product surface with `Run`, `Code`, and `Docs` modes, a grouped left rail, a custom language picker, an IDE-like code tab strip, and a prompt slide-over. + +**Architecture:** Keep the existing manifest-driven route-resolution and presentation model. Refactor the shell into a layout-oriented React composition where the left rail stays stable and the main workspace swaps between mode-specific views. Prompt assets should move out of the main pane composition into a slide-over interaction owned by the shell. + +**Tech Stack:** Next.js app router, React, existing cockpit shell components, Nx, Vitest, Playwright, existing cockpit registry/docs metadata. + +--- + +### Task 1: Lock The Shell Composition Contract + +**Files:** +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` +- Modify: `apps/cockpit/src/lib/cockpit-page.ts` +- Test: `apps/cockpit/src/components/pane-rendering.spec.tsx` + +- [ ] **Step 1: Write failing tests for the new top-level shell structure** + +Add or update tests to assert: +- left rail renders once and remains present across mode changes +- primary modes are exactly `Run`, `Code`, and `Docs` +- `Run` is active by default +- prompt assets are not rendered as a primary pane in the default shell body + +- [ ] **Step 2: Run the targeted shell test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/pane-rendering.spec.tsx` + +Expected: fail because the current shell still renders docs/code/prompt panes inline and does not expose the new mode model. 
+ +- [ ] **Step 3: Refactor `cockpit-shell.tsx` into stable rail + mode workspace composition** + +Implement: +- stable left rail region +- top workspace header +- primary mode switch +- mode-specific body composition + +- [ ] **Step 4: Re-run the shell test** + +Run: `npx vitest run apps/cockpit/src/components/pane-rendering.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/cockpit-shell.tsx apps/cockpit/src/lib/cockpit-page.ts apps/cockpit/src/components/pane-rendering.spec.tsx +git commit -m "feat: restructure cockpit shell layout" +``` + +### Task 2: Build The Stable Left Rail + +**Files:** +- Create: `apps/cockpit/src/components/sidebar/cockpit-sidebar.tsx` +- Create: `apps/cockpit/src/components/sidebar/cockpit-sidebar.spec.tsx` +- Create: `apps/cockpit/src/components/sidebar/language-picker.tsx` +- Create: `apps/cockpit/src/components/sidebar/language-picker.spec.tsx` +- Create: `apps/cockpit/src/components/sidebar/navigation-groups.tsx` +- Modify: `apps/cockpit/src/components/navigation/navigation-tree.tsx` + +- [ ] **Step 1: Write failing component tests for grouped navigation and custom language picker** + +Cover: +- grouped section headers render (`Deep Agents`, `LangGraph`) +- current entry is highlighted +- language picker opens a custom menu instead of rendering a native select +- current language is reflected in the trigger + +- [ ] **Step 2: Run the sidebar tests to verify they fail** + +Run: `npx vitest run apps/cockpit/src/components/sidebar/cockpit-sidebar.spec.tsx apps/cockpit/src/components/sidebar/language-picker.spec.tsx` + +Expected: fail because these components do not exist yet. 
+ +- [ ] **Step 3: Implement the left rail components** + +Use: +- grouped navigation +- custom popover/dropdown language picker +- minimal supporting copy + +- [ ] **Step 4: Re-run the sidebar tests** + +Run: `npx vitest run apps/cockpit/src/components/sidebar/cockpit-sidebar.spec.tsx apps/cockpit/src/components/sidebar/language-picker.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/sidebar apps/cockpit/src/components/navigation/navigation-tree.tsx +git commit -m "feat: add cockpit sidebar navigation and language picker" +``` + +### Task 3: Implement The Primary Mode Switch + +**Files:** +- Create: `apps/cockpit/src/components/modes/mode-switcher.tsx` +- Create: `apps/cockpit/src/components/modes/mode-switcher.spec.tsx` +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` + +- [ ] **Step 1: Write the failing test for mode switching** + +Cover: +- only `Run`, `Code`, and `Docs` are shown +- `Run` is active by default +- clicking another mode updates visible content + +- [ ] **Step 2: Run the mode switcher test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/modes/mode-switcher.spec.tsx` + +Expected: fail because the component does not exist yet. 
+ +- [ ] **Step 3: Implement the mode switcher and integrate it into the shell** + +- [ ] **Step 4: Re-run the mode switcher test** + +Run: `npx vitest run apps/cockpit/src/components/modes/mode-switcher.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/modes apps/cockpit/src/components/cockpit-shell.tsx +git commit -m "feat: add cockpit primary mode switching" +``` + +### Task 4: Rebuild Run Mode As The Default Live Surface + +**Files:** +- Create: `apps/cockpit/src/components/run-mode/run-mode.tsx` +- Create: `apps/cockpit/src/components/run-mode/run-mode.spec.tsx` +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` + +- [ ] **Step 1: Write a failing test for run-mode default rendering** + +Cover: +- run surface renders by default +- compact supporting context renders beside or below the surface +- docs/code body is not shown until mode changes + +- [ ] **Step 2: Run the run-mode test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/run-mode/run-mode.spec.tsx` + +Expected: fail because the run mode component does not exist yet. 
+ +- [ ] **Step 3: Implement run mode** + +Keep it simple: +- dominant live surface +- minimal supporting context +- no extra nested card stacks + +- [ ] **Step 4: Re-run the run-mode test** + +Run: `npx vitest run apps/cockpit/src/components/run-mode/run-mode.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/run-mode apps/cockpit/src/components/cockpit-shell.tsx +git commit -m "feat: add default cockpit run mode" +``` + +### Task 5: Rebuild Code Mode As A Tabbed IDE Surface + +**Files:** +- Create: `apps/cockpit/src/components/code-mode/code-mode.tsx` +- Create: `apps/cockpit/src/components/code-mode/code-mode.spec.tsx` +- Modify: `apps/cockpit/src/components/code-pane/code-pane.tsx` +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` + +- [ ] **Step 1: Write failing tests for the approved code-mode behavior** + +Cover: +- relevant files render as tabs across the top of the editor +- there is only one active file open at a time +- no left-side file column is rendered in code mode +- switching tabs updates the visible file content + +- [ ] **Step 2: Run the code-mode test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/code-mode/code-mode.spec.tsx` + +Expected: fail because current code mode does not exist. 
+ +- [ ] **Step 3: Implement the code-mode component** + +Use: +- manifest-provided relevant files +- top tab strip +- single active file model +- editor header with full file path + +- [ ] **Step 4: Re-run the code-mode test** + +Run: `npx vitest run apps/cockpit/src/components/code-mode/code-mode.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/code-mode apps/cockpit/src/components/code-pane/code-pane.tsx apps/cockpit/src/components/cockpit-shell.tsx +git commit -m "feat: implement cockpit code mode" +``` + +### Task 6: Rebuild Docs Mode As A Clean Implementation Guide + +**Files:** +- Create: `apps/cockpit/src/components/docs-mode/docs-mode.tsx` +- Create: `apps/cockpit/src/components/docs-mode/docs-mode.spec.tsx` +- Modify: `apps/cockpit/src/components/docs-pane/docs-pane.tsx` +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` + +- [ ] **Step 1: Write failing tests for docs-mode rendering** + +Cover: +- docs mode reads as a document page, not a dashboard of panes +- explanation content renders with a strong title and body +- inline code snippets and prompt copy affordances can appear + +- [ ] **Step 2: Run the docs-mode test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/docs-mode/docs-mode.spec.tsx` + +Expected: fail because the new docs mode component does not exist. 
+ +- [ ] **Step 3: Implement docs mode** + +Keep it simple: +- document-like layout +- explanation first +- inline code and copy affordances where useful + +- [ ] **Step 4: Re-run the docs-mode test** + +Run: `npx vitest run apps/cockpit/src/components/docs-mode/docs-mode.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/docs-mode apps/cockpit/src/components/docs-pane/docs-pane.tsx apps/cockpit/src/components/cockpit-shell.tsx +git commit -m "feat: implement cockpit docs mode" +``` + +### Task 7: Move Prompt Assets Into A Responsive Slide-Over + +**Files:** +- Create: `apps/cockpit/src/components/prompt-drawer/prompt-drawer.tsx` +- Create: `apps/cockpit/src/components/prompt-drawer/prompt-drawer.spec.tsx` +- Modify: `apps/cockpit/src/components/prompt-pane/prompt-pane.tsx` +- Modify: `apps/cockpit/src/components/cockpit-shell.tsx` + +- [ ] **Step 1: Write failing tests for prompt drawer behavior** + +Cover: +- clicking `Open prompt assets` opens the drawer +- prompt assets can switch within the drawer +- drawer closes without destroying main mode state +- drawer remains secondary to the main shell + +- [ ] **Step 2: Run the prompt-drawer test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/prompt-drawer/prompt-drawer.spec.tsx` + +Expected: fail because the drawer component does not exist. 
+ +- [ ] **Step 3: Implement prompt drawer** + +Use: +- header CTA trigger +- responsive slide-over positioning +- copy-oriented actions +- prompt asset switching + +- [ ] **Step 4: Re-run the prompt-drawer test** + +Run: `npx vitest run apps/cockpit/src/components/prompt-drawer/prompt-drawer.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/components/prompt-drawer apps/cockpit/src/components/prompt-pane/prompt-pane.tsx apps/cockpit/src/components/cockpit-shell.tsx +git commit -m "feat: add cockpit prompt drawer" +``` + +### Task 8: Align Styling With The Website Design System + +**Files:** +- Modify: `apps/cockpit/src/app/layout.tsx` +- Modify: `apps/cockpit/src/app/page.tsx` +- Modify: `apps/cockpit/src/app/[...slug]/page.tsx` +- Create: `apps/cockpit/src/app/cockpit.css` or equivalent local style module if needed + +- [ ] **Step 1: Write a lightweight rendering test for key shell class names or structure** + +Cover: +- full-height shell layout +- stable left rail +- top tab strip in code mode +- slide-over prompt container exists + +- [ ] **Step 2: Run the styling-oriented rendering test to verify it fails** + +Run: `npx vitest run apps/cockpit/src/components/pane-rendering.spec.tsx` + +Expected: fail until the new structure is fully reflected. 
+ +- [ ] **Step 3: Implement the final shell styling** + +Keep to the approved direction: +- full-height application shell +- fewer nested rounded boxes +- stronger typography +- flatter surfaces +- website-aligned dark palette + +- [ ] **Step 4: Re-run the rendering test** + +Run: `npx vitest run apps/cockpit/src/components/pane-rendering.spec.tsx` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add apps/cockpit/src/app apps/cockpit/src/components +git commit -m "feat: style refreshed cockpit shell" +``` + +### Task 9: Verify The Refreshed Cockpit End To End + +**Files:** +- Verify only + +- [ ] **Step 1: Run the targeted cockpit component tests** + +Run: +`npx vitest run apps/cockpit/src/components/pane-rendering.spec.tsx apps/cockpit/src/components/sidebar/cockpit-sidebar.spec.tsx apps/cockpit/src/components/sidebar/language-picker.spec.tsx apps/cockpit/src/components/modes/mode-switcher.spec.tsx apps/cockpit/src/components/run-mode/run-mode.spec.tsx apps/cockpit/src/components/code-mode/code-mode.spec.tsx apps/cockpit/src/components/docs-mode/docs-mode.spec.tsx apps/cockpit/src/components/prompt-drawer/prompt-drawer.spec.tsx` + +Expected: all pass + +- [ ] **Step 2: Run cockpit test target** + +Run: `npx nx test cockpit --skip-nx-cache` + +Expected: pass + +- [ ] **Step 3: Run cockpit build** + +Run: `npx nx build cockpit --skip-nx-cache` + +Expected: pass + +- [ ] **Step 4: Run cockpit e2e** + +Run: `npx nx e2e cockpit --skip-nx-cache` + +Expected: pass + +- [ ] **Step 5: Commit** + +```bash +git add . 
+git commit -m "test: verify refreshed cockpit ui" +``` diff --git a/docs/superpowers/plans/2026-04-05-narrative-redesign.md b/docs/superpowers/plans/2026-04-05-narrative-redesign.md new file mode 100644 index 000000000..5113a7898 --- /dev/null +++ b/docs/superpowers/plans/2026-04-05-narrative-redesign.md @@ -0,0 +1,1464 @@ +# Narrative Redesign Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add four new landing page sections (ProblemSection, FullStackSection, ChatFeaturesSection, FairComparisonSection) and one minor copy update (FeatureStrip) to reframe the site around the last-mile production narrative. + +**Architecture:** Each section is a standalone `'use client'` React component in `apps/website/src/components/landing/`. They use Framer Motion with `whileInView + viewport={{ once: true }}` for scroll-triggered entrance, `@cacheplane/design-tokens` for all colors and glass values, and inline SVG for particle animations. No new dependencies. `page.tsx` is updated to insert sections in a specific order. 
+ +**Tech Stack:** Next.js 16, React 19, TypeScript, Framer Motion (already installed), `@cacheplane/design-tokens` (workspace package) + +--- + +## File Map + +| Action | Path | Responsibility | +|--------|------|----------------| +| Create | `apps/website/src/components/landing/ProblemSection.tsx` | 3 stat cards + animated gap progress bar | +| Create | `apps/website/src/components/landing/FullStackSection.tsx` | Vertical stack diagram with SVG particle connectors + roadmap strip | +| Create | `apps/website/src/components/landing/ChatFeaturesSection.tsx` | 4-tab interactive chat scenarios | +| Create | `apps/website/src/components/landing/FairComparisonSection.tsx` | Static comparison table | +| Modify | `apps/website/src/components/landing/FeatureStrip.tsx` | Replace one feature entry copy | +| Modify | `apps/website/src/app/page.tsx` | Insert new sections in correct order | + +--- + +### Task 1: ProblemSection — stat cards and gap animation + +**Files:** +- Create: `apps/website/src/components/landing/ProblemSection.tsx` + +- [ ] **Step 1: Create the file** + +```tsx +'use client'; +import { useRef, useEffect, useState, useCallback } from 'react'; +import { motion, useInView } from 'framer-motion'; +import { tokens } from '@cacheplane/design-tokens'; + +const STATS = [ + { num: '66%', label: 'of AI solutions are almost right — not quite production-ready' }, + { num: '31%', label: 'of prioritized AI use cases actually reach production' }, + { num: '75%', label: 'of developers still want a human in the loop when trust breaks down' }, +]; + +function useCounter(target: number, duration: number, running: boolean) { + const [value, setValue] = useState(0); + useEffect(() => { + if (!running) return; + const start = performance.now(); + let raf: number; + function tick(now: number) { + const t = Math.min((now - start) / duration, 1); + const eased = t < 0.5 ? 
2 * t * t : -1 + (4 - 2 * t) * t; + setValue(Math.round(eased * target)); + if (t < 1) raf = requestAnimationFrame(tick); + } + raf = requestAnimationFrame(tick); + return () => cancelAnimationFrame(raf); + }, [running, target, duration]); + return value; +} + +type Phase = 'idle' | 'filling' | 'stall' | 'closing' | 'done'; + +export function ProblemSection() { + const triggerRef = useRef(null); + const inView = useInView(triggerRef, { once: true, amount: 0.3 }); + const [phase, setPhase] = useState('idle'); + const [fillWidth, setFillWidth] = useState('0%'); + const [fillGradient, setFillGradient] = useState( + `linear-gradient(90deg, rgba(221,0,49,.6), rgba(221,0,49,.4))` + ); + const [fillTransition, setFillTransition] = useState('none'); + const counterRunning77 = phase === 'filling'; + const counterRunning100 = phase === 'closing' || phase === 'done'; + const count77 = useCounter(77, 1700, counterRunning77); + const count100 = useCounter(23, 1000, counterRunning100); + const displayCount = phase === 'done' ? 100 : phase === 'closing' ? 77 + count100 : count77; + + const runAnimation = useCallback(() => { + if (phase !== 'idle') return; + // Phase 1: fill to 77% + setTimeout(() => { + setFillTransition('width 1.7s cubic-bezier(.4,0,.2,1)'); + setFillWidth('77%'); + setPhase('filling'); + }, 150); + // Phase 2: stall + setTimeout(() => setPhase('stall'), 2100); + // Phase 3: close gap + setTimeout(() => { + setFillTransition('width 1s cubic-bezier(.4,0,.2,1)'); + setFillGradient( + 'linear-gradient(90deg, rgba(221,0,49,.5) 0%, rgba(221,0,49,.38) 70%, rgba(0,64,144,.8) 82%, #004090 100%)' + ); + setFillWidth('100%'); + setPhase('closing'); + }, 3200); + // Phase 4: done + setTimeout(() => setPhase('done'), 4400); + }, [phase]); + + useEffect(() => { + if (inView) runAnimation(); + }, [inView, runAnimation]); + + const showStall = phase === 'stall'; + const showBadge = phase === 'closing' || phase === 'done'; + const showEnd = phase === 'done'; + + return ( +
+ {/* Eyebrow + headline */} + +

+ The Last Mile Problem +

+

+ Most AI projects get close.
+ Almost none ship. +

+

+ The issue is not generating a demo. It is shipping a trustworthy product. +

+
+ + {/* Stat cards */} +
+ {STATS.map((s, i) => ( + +
{s.num}
+

+ {s.label} +

+
+ ))} +
+ + {/* Gap animation */} + + {/* Labels row */} +
+ + Project kickoff + + + ⚠ Teams stall here + + + ✓ Production + +
+ + {/* Track — overflow:hidden clips fill at container boundary, no border-radius artifact */} +
+
+
+ {/* Hatch overlay (gap zone) */} +
+ + + + + + + + +
+
+ {/* Stall pin — outside the overflow:hidden track */} +
+
+
+ 77% +
+
+
+ + {/* Counter row */} +
+ + {displayCount}% + +
+ + StreamResource closes the gap +
+ + 100% + +
+ + {/* Tagline */} +

+ Your backend agent may already work. The frontend and production path is what slips the schedule. +

+ + + +
+ ); +} +``` + +- [ ] **Step 2: Verify it builds** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +Expected: build completes, no TypeScript errors + +- [ ] **Step 3: Start dev server and visually verify** + +```bash +cd apps/website && ../../node_modules/.bin/next dev +``` + +Open http://localhost:3000 — temporarily add `<ProblemSection />` to page.tsx to confirm: +1. Three stat cards appear with staggered entrance +2. Gap bar fills red to 77%, stalls with marker, closes blue to 100% +3. "StreamResource closes the gap" badge appears mid-animation +4. Tagline fades in at the end + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/ProblemSection.tsx +git commit -m "feat: add ProblemSection with animated gap progress bar" +``` + +--- + +### Task 2: FullStackSection — stack diagram with SVG particle connectors + +**Files:** +- Create: `apps/website/src/components/landing/FullStackSection.tsx` + +- [ ] **Step 1: Create the file** + +```tsx +'use client'; +import { motion } from 'framer-motion'; +import { tokens } from '@cacheplane/design-tokens'; + +const LAYERS = [ + { + id: 'sr', + tag: 'Primitives', + pkg: '@cacheplane/stream-resource', + color: tokens.colors.accent, + rgb: '0,64,144', + bg: 'rgba(0,64,144,0.05)', + border: 'rgba(0,64,144,0.25)', + chips: ['streamResource()', 'messages()', 'interrupt()', 'time travel', 'MockStreamTransport'], + connLabel: 'AIMessage stream', + connColor: 'rgba(0,64,144,.22)', + connFill: '#004090', + connDur: '1.1', + connOffset: '0.55', + pathId: 'path-sr', + }, + { + id: 'chat', + tag: 'UI Layer', + pkg: '@cacheplane/chat', + color: '#5a00c8', + rgb: '90,0,200', + bg: 'rgba(90,0,200,0.05)', + border: 'rgba(90,0,200,0.25)', + chips: ['', '', '', ' prebuilt', ''], + connLabel: 'Signal', + connColor: 'rgba(26,122,64,.22)', + connFill: '#1a7a40', + connDur: '1.3', + connOffset: '0.65', + pathId: 'path-chat', + }, + { + id: 'render', + tag: 'Gen UI', + pkg: 
'@cacheplane/render', + color: '#1a7a40', + rgb: '26,122,64', + bg: 'rgba(26,122,64,0.05)', + border: 'rgba(26,122,64,0.25)', + chips: ['', 'defineAngularRegistry()', 'signalStateStore()', 'JSON patch streaming'], + connLabel: 'Spec · JSON patch', + connColor: '', + connFill: '', + connDur: '', + connOffset: '', + pathId: '', + }, +]; + +const NOW_ITEMS = [ + 'Text streaming', 'Tool-call cards', 'Interrupt flows', + 'Generative UI specs', 'Thread persistence', 'Deterministic testing', +]; +const SOON_ITEMS = ['File attachments', 'Image inputs & rendering', 'Audio input', 'Multi-modal messages']; +const HORIZON_ITEMS = ['Voice UI primitives', 'Video stream rendering', 'Collaborative agents']; + +function Connector({ layer }: { layer: typeof LAYERS[0] }) { + if (!layer.connFill) return null; + return ( +
+ + + + + + + + + + + + + + + + + + {layer.connLabel} + +
+ ); +} + +export function FullStackSection() { + return ( +
+ +

+ The Complete Angular Agent Stack +

+

+ Three packages. One architecture.
+ Nothing left to wire yourself. +

+

+ LangGraph signals flow top to bottom through each layer — primitives to UI to generative components. +

+
+ + {/* Stack diagram */} + + {/* LangGraph source */} +
+

+ LangGraph Cloud +

+
+ + stream active +
+
+ + {/* Layers with connectors */} + {LAYERS.map((layer, i) => ( +
+ +
+
+ {layer.tag} +
+

+ {layer.pkg} +

+
+ {layer.chips.map(chip => ( + + {chip} + + ))} +
+
+
+ ))} +
+ + {/* Roadmap strip */} + + {/* Now */} +
+

+ Available now +

+ {NOW_ITEMS.map(item => ( +
+ + {item} +
+ ))} +
+ {/* Soon */} +
+

+ Coming soon{' '} + + Planned + +

+ {SOON_ITEMS.map(item => ( +
+ + {item} +
+ ))} +
+ {/* Horizon */} +
+

+ On the horizon +

+ {HORIZON_ITEMS.map(item => ( +
+ + {item} +
+ ))} +
+
+ + +
+ ); +} +``` + +- [ ] **Step 2: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +Expected: no TypeScript errors + +- [ ] **Step 3: Visual verification** + +Temporarily add `` to page.tsx. Confirm: +1. LangGraph source node is visible at top +2. Three layer cards appear: Primitives (blue), UI Layer (purple), Gen UI (green) +3. SVG particles animate continuously down each connector line +4. Roadmap strip shows three columns below + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/FullStackSection.tsx +git commit -m "feat: add FullStackSection with animated stack diagram and roadmap strip" +``` + +--- + +### Task 3: ChatFeaturesSection — interactive 4-tab chat scenarios + +**Files:** +- Create: `apps/website/src/components/landing/ChatFeaturesSection.tsx` + +- [ ] **Step 1: Create the file** + +```tsx +'use client'; +import { useRef, useCallback, useState } from 'react'; +import { motion } from 'framer-motion'; +import { tokens } from '@cacheplane/design-tokens'; + +// ── Types ────────────────────────────────────────────────────────────────── +type Callout = { tag: string; body: string; color: string; rgb: string }; +type FeatKey = 'stream' | 'genui' | 'tools' | 'interrupt'; + +interface FeatDef { + label: string; + color: string; + rgb: string; + badgeText: string; + left: Callout[]; + right: Callout[]; + question: string; + run: (ctx: ScenarioCtx) => Promise; +} + +interface ScenarioCtx { + addUser: (text: string) => void; + addTyping: () => () => void; + makeAIBubble: () => { bbl: HTMLElement; out: HTMLElement; cur: HTMLElement }; + typeText: (out: HTMLElement, cur: HTMLElement, text: string, ms?: number) => Promise; + appendToMsgs: (el: HTMLElement) => void; + scroll: () => void; + litLeft: (idx: number) => void; + litRight: (idx: number) => void; + wait: (ms: number) => Promise; + token: number; +} + +const wait = (ms: number) => new Promise(r => setTimeout(r, ms)); 
+ +// ── Scenarios ────────────────────────────────────────────────────────────── +async function runStream(ctx: ScenarioCtx) { + const { addUser, addTyping, makeAIBubble, typeText, litLeft, litRight, wait: w, token } = ctx; + await w(300); + addUser('Walk me through LangGraph state.'); + litLeft(0); + await w(800); + const removeTyping = addTyping(); + litLeft(0); + await w(1100); + removeTyping(); + if (ctx.token !== token) return; + const { out, cur } = makeAIBubble(); + litRight(0); + await typeText(out, cur, + 'LangGraph structures your agent as a graph of nodes and edges. Each node returns a partial state update. StreamResource connects to that stream and exposes each update as Angular signals — so your template reacts as tokens arrive.', + 30 + ); + if (ctx.token !== token) return; + cur.style.display = 'none'; +} + +async function runGenUI(ctx: ScenarioCtx) { + const { addUser, addTyping, makeAIBubble, typeText, appendToMsgs, litLeft, litRight, scroll, wait: w, token } = ctx; + await w(300); + addUser('Show Q4 revenue by region.'); + litLeft(0); + await w(800); + const removeTyping = addTyping(); + await w(900); + removeTyping(); + if (ctx.token !== token) return; + const { bbl, out, cur } = makeAIBubble(); + await typeText(out, cur, "Here's the live Q4 breakdown —", 36); + if (ctx.token !== token) return; + cur.style.display = 'none'; + litRight(0); + // Gen UI block + const gui = document.createElement('div'); + gui.style.cssText = 'margin-top:8px;background:rgba(255,255,255,.04);border:1px solid rgba(26,122,64,.25);border-radius:9px;overflow:hidden;animation:sr-fade .28s ease-out'; + gui.innerHTML = ` +
+ + @cacheplane/render · DataTable +
+
`; + bbl.appendChild(gui); + scroll(); + await w(250); + const body = document.getElementById('_gui-body'); + if (!body) return; + const rows: [string, string, string][] = [ + ['North America', '$4.2M', 'color:#4caf50'], + ['Europe', '$3.1M', 'color:#4caf50'], + ['APAC', '$1.8M', 'color:#ef5350'], + ['Total', '$9.1M', ''], + ]; + for (const [l, v, style] of rows) { + if (ctx.token !== token) return; + const r = document.createElement('div'); + r.style.cssText = 'display:flex;justify-content:space-between;padding:3px 0;border-bottom:1px solid rgba(255,255,255,.04);font-size:.71rem;animation:sr-fade .28s ease-out'; + r.innerHTML = `${l}${v}`; + body.appendChild(r); + scroll(); + await w(200); + } +} + +async function runTools(ctx: ScenarioCtx) { + const { addUser, appendToMsgs, makeAIBubble, typeText, litLeft, litRight, scroll, wait: w, token } = ctx; + await w(300); + addUser('Find the latest pricing docs.'); + litLeft(0); + await w(600); + // Tool card fires BEFORE any AI text — correct LangGraph execution order + const card = document.createElement('div'); + card.style.cssText = 'margin-left:31px;background:rgba(255,255,255,.025);border:1px solid rgba(0,64,144,.22);border-radius:9px;padding:9px 12px;animation:sr-fade .28s ease-out'; + card.innerHTML = ` +
+
🔍
+ search_docs + ● running +
+ + + `; + appendToMsgs(card); + litRight(0); + scroll(); + await w(350); + for (const id of ['_ts1', '_ts2']) { + if (ctx.token !== token) return; + const el = document.getElementById(id); if (el) el.style.display = ''; + await w(400); scroll(); + } + await w(500); + if (ctx.token !== token) return; + const ts3 = document.getElementById('_ts3'); if (ts3) ts3.style.display = ''; + const tst = document.getElementById('_tool-st'); + if (tst) { tst.textContent = '✓ done'; tst.style.color = '#4caf50'; } + scroll(); + await w(700); + if (ctx.token !== token) return; + const { out, cur } = makeAIBubble(); + await typeText(out, cur, + 'Found 3 pricing docs. The most recent is the Enterprise Tier sheet from last quarter — updated volume discount tiers included.', + 33 + ); + if (ctx.token !== token) return; + cur.style.display = 'none'; +} + +async function runInterrupt(ctx: ScenarioCtx) { + const { addUser, addTyping, makeAIBubble, typeText, appendToMsgs, litLeft, litRight, scroll, wait: w, token } = ctx; + await w(300); + addUser('Deploy the service to production.'); + litLeft(0); + await w(800); + const removeTyping = addTyping(); + await w(900); + removeTyping(); + if (ctx.token !== token) return; + const { out, cur } = makeAIBubble(); + await typeText(out, cur, 'Preparing the deployment — ', 38); + if (ctx.token !== token) return; + cur.style.display = 'none'; + await w(300); + litRight(0); + const panel = document.createElement('div'); + panel.style.cssText = 'margin:0 0 0 31px;background:rgba(255,160,50,.05);border:1px solid rgba(255,160,50,.2);border-radius:11px;padding:12px 14px;animation:sr-fade .28s ease-out'; + panel.innerHTML = ` +
⚠ Approval required
+

+ Deploy api-service v2.1.0 to production? This will affect live traffic across all regions. +

+
+ + + +
`; + appendToMsgs(panel); + scroll(); +} + +// ── Feature definitions ──────────────────────────────────────────────────── +const FEATURES: Record = { + stream: { + label: 'Streaming', color: '#6C8EFF', rgb: '108,142,255', badgeText: 'chat-messages', + left: [{ tag: '', body: 'Token-by-token rendering with live cursor. Signals-native, fully OnPush.', color: '#6C8EFF', rgb: '108,142,255' }], + right: [{ tag: 'isStreaming()', body: 'Reactive signal — true while tokens arrive. Drive spinners and disable inputs without polling.', color: '#6C8EFF', rgb: '108,142,255' }], + question: 'Walk me through LangGraph state.', run: runStream, + }, + genui: { + label: 'Generative UI', color: '#1a7a40', rgb: '26,122,64', badgeText: 'chat-generative-ui', + left: [{ tag: '', body: 'Intercepts onCustomEvent from the agent stream. Wraps and your component registry.', color: '#1a7a40', rgb: '26,122,64' }], + right: [{ tag: '', body: 'Resolves your Angular component by name, passes props as signals, streams JSON patch updates.', color: '#1a7a40', rgb: '26,122,64' }], + question: 'Show Q4 revenue by region.', run: runGenUI, + }, + tools: { + label: 'Tool Calls', color: tokens.colors.accent, rgb: '0,64,144', badgeText: 'chat-tool-call-card', + left: [{ tag: '', body: 'Headless wrapper exposing tool execution state as signals. Compose your own tool UI.', color: tokens.colors.accent, rgb: '0,64,144' }], + right: [{ tag: '', body: 'Prebuilt card with expandable steps, live progress rows, and collapsible result state.', color: tokens.colors.accent, rgb: '0,64,144' }], + question: 'Find the latest pricing docs.', run: runTools, + }, + interrupt: { + label: 'Interrupt', color: '#FFA726', rgb: '255,167,38', badgeText: 'chat-interrupt-panel', + left: [{ tag: '', body: 'Headless interrupt state. Exposes interrupt() signal — bring your own approval UI.', color: '#FFA726', rgb: '255,167,38' }], + right: [{ tag: '', body: 'Prebuilt approval card. 
Wired to LangGraph interrupt resume — approve, edit, or cancel.', color: '#FFA726', rgb: '255,167,38' }], + question: 'Deploy the service to production.', run: runInterrupt, + }, +}; + +// ── Component ────────────────────────────────────────────────────────────── +export function ChatFeaturesSection() { + const [activeFeat, setActiveFeat] = useState('stream'); + const tokenRef = useRef(0); + const msgsRef = useRef(null); + + const buildCtx = useCallback((token: number): ScenarioCtx => { + const msgs = msgsRef.current!; + const scroll = () => { msgs.scrollTop = msgs.scrollHeight; }; + + const addUser = (text: string) => { + const row = document.createElement('div'); + row.style.cssText = 'display:flex;gap:7px;align-items:flex-start;flex-direction:row-reverse;animation:sr-fade .28s ease-out'; + row.innerHTML = ` +
U
+
${text}
`; + msgs.appendChild(row); scroll(); + }; + + const addTyping = () => { + const row = document.createElement('div'); + row.style.cssText = 'display:flex;align-items:center;gap:7px;animation:sr-fade .28s ease-out'; + row.innerHTML = ` +
AI
+
+ + + +
`; + msgs.appendChild(row); scroll(); + return () => row.remove(); + }; + + const makeAIBubble = () => { + const row = document.createElement('div'); + row.style.cssText = 'display:flex;gap:7px;align-items:flex-start;animation:sr-fade .28s ease-out'; + const av = document.createElement('div'); + av.style.cssText = 'width:24px;height:24px;border-radius:50%;background:rgba(108,142,255,.18);color:#6C8EFF;display:flex;align-items:center;justify-content:center;font-size:.58rem;font-weight:700;flex-shrink:0;margin-top:2px'; + av.textContent = 'AI'; + const bbl = document.createElement('div'); + bbl.style.cssText = 'padding:8px 12px;font-size:.78rem;line-height:1.55;max-width:270px;background:rgba(108,142,255,.1);border:1px solid rgba(108,142,255,.13);color:#c8ccee;border-radius:4px 13px 13px 13px'; + const out = document.createElement('span'); + const cur = document.createElement('span'); + cur.style.cssText = 'display:inline-block;width:2px;height:.8em;background:#6C8EFF;vertical-align:text-bottom;margin-left:1px;border-radius:1px;animation:sr-blink .85s step-end infinite'; + bbl.appendChild(out); bbl.appendChild(cur); + row.appendChild(av); row.appendChild(bbl); + msgs.appendChild(row); scroll(); + return { bbl, out, cur } as unknown as { bbl: HTMLElement; out: HTMLElement; cur: HTMLElement }; + }; + + const typeText = async (out: HTMLElement, cur: HTMLElement, text: string, ms = 34) => { + for (const ch of text) { + if (tokenRef.current !== token) return; + const s = document.createElement('span'); s.textContent = ch; + out.parentNode!.insertBefore(s, cur); + scroll(); + await wait(ms); + } + }; + + const appendToMsgs = (el: HTMLElement) => { msgs.appendChild(el); scroll(); }; + + const litLeft = (idx: number) => { + const items = document.querySelectorAll('#feat-left .feat-co'); + if (items[idx]) items[idx].classList.add('feat-co-lit'); + }; + const litRight = (idx: number) => { + const items = document.querySelectorAll('#feat-right .feat-co'); + if (items[idx]) 
items[idx].classList.add('feat-co-lit'); + }; + + return { addUser, addTyping, makeAIBubble, typeText, appendToMsgs, scroll, litLeft, litRight, wait, token }; + }, []); + + const switchFeat = useCallback((key: FeatKey) => { + tokenRef.current += 1; + const token = tokenRef.current; + setActiveFeat(key); + const msgs = msgsRef.current; + if (msgs) msgs.innerHTML = ''; + // Clear lit callouts + document.querySelectorAll('.feat-co-lit').forEach(el => el.classList.remove('feat-co-lit')); + setTimeout(() => { + const ctx = buildCtx(token); + FEATURES[key].run(ctx); + }, 350); + }, [buildCtx]); + + // Auto-start streaming scenario on mount + const startedRef = useRef(false); + const containerRef = useRef(null); + + // Use intersection observer to start first scenario when visible + const handleStart = useCallback(() => { + if (startedRef.current) return; + startedRef.current = true; + setTimeout(() => { + tokenRef.current += 1; + const token = tokenRef.current; + const ctx = buildCtx(token); + FEATURES['stream'].run(ctx); + }, 500); + }, [buildCtx]); + + const feat = FEATURES[activeFeat]; + + return ( + + {/* Eyebrow + headline */} +
+

+ @cacheplane/chat +

+

+ Every agent UI primitive,
+ ready to compose. +

+

+ Click a feature to see the component in action. +

+ {/* Feature tabs */} +
+ {(Object.keys(FEATURES) as FeatKey[]).map(key => { + const f = FEATURES[key]; + const isActive = activeFeat === key; + return ( + + ); + })} +
+
+ + {/* 3-col layout */} +
+ + {/* Left callouts */} +
+ {feat.left.map((co, i) => ( +
+

+ {co.tag} +

+

{co.body}

+
+ ))} +
+ + {/* Chat window */} +
+ {/* Title bar */} +
+
+ {['#FF5F57','#FFBD2E','#28CA41'].map(c =>
)} +
+
+ angular agent +
+
+ {feat.badgeText} +
+
+ {/* Messages */} +
+ {/* Input bar */} +
+ + +
+
+ + {/* Right callouts */} +
+ {feat.right.map((co, i) => ( +
+

+ {co.tag} +

+

{co.body}

+
+ ))} +
+ +
+ + + + ); +} +``` + +- [ ] **Step 2: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +Expected: no TypeScript errors + +- [ ] **Step 3: Visual verification** + +Add `` to page.tsx temporarily. Confirm: +1. Four tab buttons render: Streaming, Generative UI, Tool Calls, Interrupt +2. Streaming tab: user message → typing → AI tokens stream in +3. Gen UI tab: AI text → DataTable rows stream in inline +4. Tool Calls tab: tool card appears first, steps progress, then AI responds +5. Interrupt tab: AI starts sentence → pauses → approval panel appears +6. Callout cards light up on correct step in each scenario +7. Switching tabs clears the chat and starts the new scenario + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/ChatFeaturesSection.tsx +git commit -m "feat: add ChatFeaturesSection with 4 interactive chat scenarios" +``` + +--- + +### Task 4: FairComparisonSection — static comparison table + +**Files:** +- Create: `apps/website/src/components/landing/FairComparisonSection.tsx` + +- [ ] **Step 1: Create the file** + +```tsx +'use client'; +import { motion } from 'framer-motion'; +import { tokens } from '@cacheplane/design-tokens'; + +const ROWS = [ + { + capability: 'Token streaming', + without: 'Custom SSE wiring + zone management', + with: 'streamResource() signal, zero boilerplate', + }, + { + capability: 'Thread persistence', + without: 'Manual localStorage + API calls', + with: 'threadId signal + onThreadId callback', + }, + { + capability: 'Interrupt flows', + without: 'Custom polling or WebSocket', + with: 'interrupt() signal + resume built in', + }, + { + capability: 'Tool-call rendering', + without: 'Custom event parsing', + with: ' or headless ', + }, + { + capability: 'Generative UI', + without: 'No established pattern', + with: ' + + registry', + }, + { + capability: 'Deterministic testing', + without: 'Mock HTTP + tick management', + with: 
'MockStreamTransport + writable signals', + }, + { + capability: 'Human approval UI', + without: 'Build from scratch', + with: '', + }, + { + capability: 'Full chat layout', + without: 'Build from scratch', + with: ' drop-in', + }, +]; + +export function FairComparisonSection() { + return ( +
+ +

+ A fair comparison +

+

+ What StreamResource adds +

+

+ LangChain and LangGraph are excellent. This is what the Angular production layer provides on top. +

+
+ + + {/* Table header */} +
+ {['Capability', 'LangChain + Angular alone', 'With StreamResource'].map((h, i) => ( +
+ {h} +
+ ))} +
+ {/* Rows */} + {ROWS.map((row, i) => ( + +
+ {row.capability} +
+
+ {row.without} +
+
+ + + {row.with} + +
+
+ ))} +
+
+ ); +} +``` + +- [ ] **Step 2: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +- [ ] **Step 3: Visual verification** + +Add `<FairComparisonSection />` to page.tsx. Confirm 8 rows render, "With" column shows ✓ in green with blue text, header row shows "With StreamResource" in accent blue. + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/FairComparisonSection.tsx +git commit -m "feat: add FairComparisonSection comparison table" +``` + +--- + +### Task 5: Update FeatureStrip copy + +**Files:** +- Modify: `apps/website/src/components/landing/FeatureStrip.tsx` + +- [ ] **Step 1: Open the file and find the Generative UI entry** + +The current `FEATURES` array in `FeatureStrip.tsx` contains entries. Find the entry about "Generative UI" or any entry that implies StreamResource is the only solution for generative UI in Angular. Replace it with: + +```tsx +{ icon: '🎨', title: 'Generative UI', desc: 'Agent-emitted Angular components via @cacheplane/render. Your component registry, your design — rendered inline from a JSON spec.' }, +``` + +If no such entry exists, verify by reading the file: + +```bash +grep -n "Generative\|generative\|no established" apps/website/src/components/landing/FeatureStrip.tsx +``` + +If the grep returns nothing, skip to Step 3. + +- [ ] **Step 2: Apply the replacement** + +Open `apps/website/src/components/landing/FeatureStrip.tsx`. In the `FEATURES` array, if there is a feature with text like "no established pattern" or "no established Angular pattern exists" replace only that feature entry's `desc` field with: + +``` +'Agent-emitted Angular components via @cacheplane/render. Your component registry, your design — rendered inline from a JSON spec.' +``` + +Leave all other entries untouched. 
+ +- [ ] **Step 3: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -10 +``` + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/FeatureStrip.tsx +git commit -m "fix: update FeatureStrip generative UI copy to remove exclusivity claim" +``` + +--- + +### Task 6: Wire all sections into page.tsx + +**Files:** +- Modify: `apps/website/src/app/page.tsx` + +- [ ] **Step 1: Add imports** + +Add these four imports to `apps/website/src/app/page.tsx` alongside the existing imports: + +```tsx +import { ProblemSection } from '../components/landing/ProblemSection'; +import { FullStackSection } from '../components/landing/FullStackSection'; +import { ChatFeaturesSection } from '../components/landing/ChatFeaturesSection'; +import { FairComparisonSection } from '../components/landing/FairComparisonSection'; +``` + +- [ ] **Step 2: Insert sections in correct order** + +The full updated section order in the JSX: + +```tsx + + +{/* New: problem narrative */} + + + +{/* Existing */} + + + + +{/* New: comparison */} + +{/* Existing */} + + + +``` + +- [ ] **Step 3: Add two more ambient gradient blobs** for the extended page height (new sections add ~2400px of content) + +Add after the existing five blobs: + +```tsx +
+
+``` + +- [ ] **Step 4: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +Expected: successful build, no type errors + +- [ ] **Step 5: Full visual verification** + +```bash +cd apps/website && ../../node_modules/.bin/next dev +``` + +Open http://localhost:3000. Scroll top to bottom and verify: +1. After StatsStrip: ProblemSection with 3 stat cards + gap animation +2. After ProblemSection: FullStackSection stack with animated particles + roadmap strip +3. After FullStackSection: ChatFeaturesSection with 4 tabs, each scenario runs correctly +4. After DeepAgentsShowcase: FairComparisonSection with 8-row table +5. All existing sections still present and unchanged +6. No console errors + +- [ ] **Step 6: Commit** + +```bash +git add apps/website/src/app/page.tsx +git commit -m "feat: wire ProblemSection, FullStackSection, ChatFeaturesSection, FairComparisonSection into landing page" +``` diff --git a/docs/superpowers/plans/2026-04-05-whitepaper-pipeline.md b/docs/superpowers/plans/2026-04-05-whitepaper-pipeline.md new file mode 100644 index 000000000..b862f1cb6 --- /dev/null +++ b/docs/superpowers/plans/2026-04-05-whitepaper-pipeline.md @@ -0,0 +1,746 @@ +# White Paper Pipeline Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build a one-time Anthropic SDK script that generates a 6-chapter PDF white paper, plus a free-download `WhitePaperSection` component on the landing page with an optional lead-gen form. + +**Architecture:** `apps/website/scripts/generate-whitepaper.ts` calls Anthropic API for 6 chapters shaped by the production-readiness assessment framework, builds a self-contained HTML document, then uses Puppeteer to render it to `apps/website/public/whitepaper.pdf`. 
A `WhitePaperSection` React component provides a direct download link and optional name/email form backed by a Next.js API route that appends to a local NDJSON file. + +**Tech Stack:** `@anthropic-ai/sdk` (already in devDependencies at `^0.79.0`), `puppeteer` (add as devDependency), `npx tsx` for script execution, Next.js API route for form submission + +--- + +## File Map + +| Action | Path | Responsibility | +|--------|------|----------------| +| Create | `apps/website/scripts/generate-whitepaper.ts` | Anthropic API calls + HTML build + Puppeteer PDF render | +| Create | `apps/website/src/components/landing/WhitePaperSection.tsx` | Download button + optional lead-gen form | +| Create | `apps/website/src/app/api/whitepaper-signup/route.ts` | POST handler, appends to NDJSON file | +| Modify | `package.json` (root) | Add `generate-whitepaper` script + `puppeteer` devDependency | +| Modify | `apps/website/src/app/page.tsx` | Insert `` | + +--- + +### Task 1: Add puppeteer and generate-whitepaper script to package.json + +**Files:** +- Modify: `package.json` (root, at `/Users/blove/repos/stream-resource/package.json`) + +- [ ] **Step 1: Add puppeteer devDependency** + +In the root `package.json`, add `"puppeteer": "^22.0.0"` to `devDependencies`. 
+ +- [ ] **Step 2: Add generate-whitepaper script** + +In the root `package.json` `scripts` section, add: + +```json +"generate-whitepaper": "npx tsx apps/website/scripts/generate-whitepaper.ts" +``` + +The scripts section should look like: + +```json +"scripts": { + "generate-agent-context": "npx tsx --tsconfig apps/website/tsconfig.json apps/website/scripts/generate-agent-context.ts", + "generate-api-docs": "npx tsx apps/website/scripts/generate-api-docs.ts", + "generate-narrative-docs": "npx tsx apps/website/scripts/generate-narrative-docs.ts", + "generate-docs": "npm run generate-api-docs && npm run generate-narrative-docs", + "generate-whitepaper": "npx tsx apps/website/scripts/generate-whitepaper.ts" +} +``` + +- [ ] **Step 3: Install puppeteer** + +```bash +cd /Users/blove/repos/stream-resource && npm install +``` + +Expected: `node_modules/puppeteer` directory appears, no install errors + +- [ ] **Step 4: Commit** + +```bash +git add package.json package-lock.json +git commit -m "chore: add puppeteer devDependency and generate-whitepaper script" +``` + +--- + +### Task 2: Create the generate-whitepaper.ts script + +**Files:** +- Create: `apps/website/scripts/generate-whitepaper.ts` + +- [ ] **Step 1: Create the script** + +```typescript +import Anthropic from '@anthropic-ai/sdk'; +import fs from 'fs'; +import path from 'path'; +import puppeteer from 'puppeteer'; + +const client = new Anthropic(); +const MODEL = process.env['ANTHROPIC_MODEL'] ?? 'claude-opus-4-5'; +const OUTPUT_PDF = 'apps/website/public/whitepaper.pdf'; +const OUTPUT_HTML = 'apps/website/public/whitepaper-preview.html'; + +// ── Chapter definitions ────────────────────────────────────────────────── +const CHAPTERS = [ + { + id: 'streaming-state', + title: 'Streaming State Management', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". 
+ +Chapter topic: Streaming State Management + +Context: Angular teams building LangGraph-powered agents must wire SSE event streams into reactive UI. Without the right primitives, they end up with custom zone-patching, manual subscription management, and brittle token accumulation logic that breaks under load. + +Cover: +- Why streaming state is hard in Angular (zone.js, change detection, timing) +- The signals-native approach: how streamResource() exposes messages() as Signal +- How isStreaming() lets developers drive loading UI without polling +- Code example: minimal streamResource() setup (TypeScript snippet, 8-12 lines) +- Production checklist item: "Are your message signals OnPush-compatible?" + +Tone: Direct, technical, peer-to-peer. No fluff. Audience is senior Angular engineers.`, + }, + { + id: 'thread-persistence', + title: 'Thread Persistence', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". + +Chapter topic: Thread Persistence + +Context: Demos work with ephemeral state. Production agents need conversation history that survives page refreshes, tab switches, and navigation — wired to LangGraph's MemorySaver backend. + +Cover: +- Why stateless agent UIs fail in production +- The threadId signal and onThreadId callback pattern +- How to persist threadId to localStorage and restore on mount +- Thread list UI and switching between conversations +- Code example: provideStreamResource() with threadId (8-12 lines) +- Production checklist item: "Does your agent UI resume threads correctly after a browser refresh?" + +Tone: Direct, technical, peer-to-peer. No fluff. Audience is senior Angular engineers.`, + }, + { + id: 'tool-call-rendering', + title: 'Tool-Call Rendering', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". 
+ +Chapter topic: Tool-Call Rendering + +Context: LangGraph agents invoke tools mid-stream. The UI needs to show tool execution state in real time — steps appearing as the tool runs, a final result, and collapsible history — without parsing raw SSE events by hand. + +Cover: +- What tool call events look like in the raw stream +- Why hand-parsing is fragile and hard to test +- The headless primitive and prebuilt option +- Progressive disclosure: showing steps live, collapsing on completion +- Code example: binding (8-12 lines of Angular template) +- Production checklist item: "Do your tool call cards handle partial step state during streaming?" + +Tone: Direct, technical, peer-to-peer. No fluff. Audience is senior Angular engineers.`, + }, + { + id: 'human-approval-flows', + title: 'Human Approval Flows', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". + +Chapter topic: Human Approval Flows (Interrupts) + +Context: Production agents that take consequential actions — sending emails, deploying services, modifying data — must pause for human approval before proceeding. This requires a tight loop between LangGraph's interrupt() primitive and Angular UI. + +Cover: +- The LangGraph interrupt() and Command.RESUME pattern +- Why polling and custom websocket approaches are brittle +- The interrupt() signal in streamResource() and how it maps to approval state +- headless and prebuilt +- The three approval actions: approve, edit, cancel — and how each maps to a resume command +- Code example: interrupt signal binding (8-12 lines) +- Production checklist item: "Can your agent UI recover gracefully if a user cancels an interrupt?" + +Tone: Direct, technical, peer-to-peer. No fluff. 
Audience is senior Angular engineers.`, + }, + { + id: 'generative-ui', + title: 'Generative UI', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". + +Chapter topic: Generative UI + +Context: The most advanced production agents emit structured UI specs — not just text. A data analysis agent might render a live table. A booking agent might render a reservation form. Without a framework for this, teams either hardcode component logic into the agent or skip the feature entirely. + +Cover: +- The onCustomEvent pattern in LangGraph: how agents emit structured data +- The @cacheplane/render approach: json-render specs, defineAngularRegistry(), +- How JSON patch streaming enables progressive UI updates (rows appearing as data arrives) +- The registry pattern: decoupling agent from component implementation +- Code example: defineAngularRegistry() registration (8-12 lines) +- Production checklist item: "Can your agent emit UI components without tight coupling to the frontend codebase?" + +Tone: Direct, technical, peer-to-peer. No fluff. Audience is senior Angular engineers.`, + }, + { + id: 'deterministic-testing', + title: 'Deterministic Testing', + prompt: `Write a 400-600 word chapter for an engineering white paper titled "From Prototype to Production: The Angular Agent Readiness Guide". + +Chapter topic: Deterministic Testing + +Context: Agent UIs are notoriously hard to test because they depend on live LLM responses. Flaky tests, slow CI, and inability to reproduce edge cases are the main reasons agent UIs ship with low confidence. 
+ +Cover: +- Why testing agent components against real LLM APIs is impractical +- The MockStreamTransport approach: scripted event sequences, no server needed +- createMockStreamResourceRef(): writable signals you control directly in tests +- How to test streaming, interrupts, tool calls, and generative UI in isolation +- Code example: createMockStreamResourceRef() test pattern (10-14 lines) +- Production checklist item: "Do your agent component tests run offline and complete in under 100ms each?" + +Tone: Direct, technical, peer-to-peer. No fluff. Audience is senior Angular engineers.`, + }, +]; + +// ── Markdown to HTML converter ─────────────────────────────────────────── +function mdToHTML(md: string): string { + return md + .replace(/```[\w]*\n([\s\S]*?)```/g, '
$1
') + .replace(/^### (.+)$/gm, '

$1

') + .replace(/^## (.+)$/gm, '

$1

') + .replace(/\*\*(.+?)\*\*/g, '$1') + .replace(/^- (.+)$/gm, '
  • $1
  • ') + .replace(/(
  • [^\n]+<\/li>\n?)+/g, match => `
      ${match}
    `) + .split('\n\n') + .map(block => { + if (block.startsWith('${trimmed}

    ` : ''; + }) + .join('\n'); +} + +// ── HTML builder ───────────────────────────────────────────────────────── +function buildHTML(chapters: Array<{ title: string; content: string }>): string { + const tocHTML = chapters.map((ch, i) => ` +
    + ${String(i + 1).padStart(2, '0')} + ${ch.title} +
    `).join(''); + + const chaptersHTML = chapters.map((ch, i) => ` +
    +
    Chapter ${i + 1}
    +

    ${ch.title}

    +
    ${mdToHTML(ch.content)}
    +
    `).join(''); + + return ` + + + + + + + + + + +
    +
    StreamResource · Production Readiness Guide
    +

    From Prototype
    to Production

    +

    The Angular Agent Readiness Guide

    +
    cacheplane.ai · ${new Date().getFullYear()}
    +
    + + +
    +

    Contents

    + ${tocHTML} +
    + + +${chaptersHTML} + + +`; +} + +// ── PDF renderer ───────────────────────────────────────────────────────── +async function renderPDF(html: string, outputPath: string): Promise { + console.log(' Launching browser for PDF render...'); + const browser = await puppeteer.launch({ headless: true }); + const page = await browser.newPage(); + await page.setContent(html, { waitUntil: 'networkidle0' }); + await page.pdf({ + path: outputPath, + format: 'A4', + printBackground: true, + margin: { top: '0', right: '0', bottom: '0', left: '0' }, + }); + await browser.close(); +} + +// ── Chapter generator ──────────────────────────────────────────────────── +async function generateChapter(chapter: typeof CHAPTERS[0]): Promise { + console.log(` Generating: ${chapter.title}...`); + const message = await client.messages.create({ + model: MODEL, + max_tokens: 1500, + messages: [{ role: 'user', content: chapter.prompt }], + }); + const content = message.content[0]; + if (content.type !== 'text') throw new Error(`Unexpected content type: ${content.type}`); + return content.text; +} + +// ── Main ───────────────────────────────────────────────────────────────── +async function main() { + console.log('StreamResource White Paper Generator\n'); + console.log(`Model: ${MODEL}`); + console.log(`Output: ${OUTPUT_PDF}\n`); + + const generatedChapters: Array<{ title: string; content: string }> = []; + + for (const chapter of CHAPTERS) { + const content = await generateChapter(chapter); + generatedChapters.push({ title: chapter.title, content }); + } + + console.log('\nBuilding HTML document...'); + const html = buildHTML(generatedChapters); + fs.mkdirSync(path.dirname(OUTPUT_HTML), { recursive: true }); + fs.writeFileSync(OUTPUT_HTML, html, 'utf8'); + console.log(` HTML preview: ${OUTPUT_HTML}`); + + console.log('Rendering PDF...'); + fs.mkdirSync(path.dirname(OUTPUT_PDF), { recursive: true }); + await renderPDF(html, OUTPUT_PDF); + + const stat = fs.statSync(OUTPUT_PDF); + 
console.log(`\n✓ Done. PDF saved to ${OUTPUT_PDF} (${Math.round(stat.size / 1024)}KB)`); + console.log(`✓ HTML preview: ${OUTPUT_HTML}`); +} + +main().catch(err => { + console.error('Generation failed:', err); + process.exit(1); +}); +``` + +- [ ] **Step 2: Type-check the script** + +```bash +cd /Users/blove/repos/stream-resource && npx tsx --tsconfig apps/website/tsconfig.json --check apps/website/scripts/generate-whitepaper.ts 2>&1 | tail -20 +``` + +Expected: no type errors. If puppeteer types are missing, run: `npm install --save-dev @types/puppeteer` (puppeteer v22 includes its own types, so this likely isn't needed) + +- [ ] **Step 3: Dry-run with a single chapter (optional smoke test)** + +Temporarily edit the script to only include the first chapter (`const CHAPTERS = [CHAPTERS_FULL[0]]`), run it, then revert. This confirms the Anthropic API call and Puppeteer render work before spending credits on all 6 chapters. + +```bash +ANTHROPIC_MODEL=claude-haiku-4-5 npm run generate-whitepaper +``` + +Expected: `apps/website/public/whitepaper.pdf` appears, file size > 50KB + +- [ ] **Step 4: Run full generation** + +```bash +npm run generate-whitepaper +``` + +Expected output: +``` +StreamResource White Paper Generator + +Model: claude-opus-4-5 +Output: apps/website/public/whitepaper.pdf + + Generating: Streaming State Management... + Generating: Thread Persistence... + Generating: Tool-Call Rendering... + Generating: Human Approval Flows... + Generating: Generative UI... + Generating: Deterministic Testing... + +Building HTML document... + HTML preview: apps/website/public/whitepaper-preview.html +Rendering PDF... + Launching browser for PDF render... + +✓ Done. PDF saved to apps/website/public/whitepaper.pdf (XXX KB) +✓ HTML preview: apps/website/public/whitepaper-preview.html +``` + +Open `apps/website/public/whitepaper-preview.html` in a browser and verify: cover page, table of contents, 6 chapters with correct titles and code examples. 
+ +- [ ] **Step 5: Commit** + +```bash +git add apps/website/scripts/generate-whitepaper.ts apps/website/public/whitepaper.pdf apps/website/public/whitepaper-preview.html +git commit -m "feat: add whitepaper generation script and generated PDF" +``` + +--- + +### Task 3: Create the API route for optional lead-gen form + +**Files:** +- Create: `apps/website/src/app/api/whitepaper-signup/route.ts` + +- [ ] **Step 1: Create the directory and route file** + +```typescript +// apps/website/src/app/api/whitepaper-signup/route.ts +import { NextRequest, NextResponse } from 'next/server'; +import fs from 'fs'; +import path from 'path'; + +const SIGNUPS_FILE = path.join(process.cwd(), 'data', 'whitepaper-signups.ndjson'); + +export async function POST(req: NextRequest) { + let body: { name?: string; email?: string }; + try { + body = await req.json(); + } catch { + return NextResponse.json({ error: 'Invalid JSON' }, { status: 400 }); + } + + const { name = '', email = '' } = body; + if (!email || !email.includes('@')) { + return NextResponse.json({ error: 'Valid email required' }, { status: 400 }); + } + + const entry = JSON.stringify({ name: name.trim(), email: email.trim(), ts: new Date().toISOString() }) + '\n'; + try { + fs.mkdirSync(path.dirname(SIGNUPS_FILE), { recursive: true }); + fs.appendFileSync(SIGNUPS_FILE, entry, 'utf8'); + } catch (err) { + console.error('Failed to write signup:', err); + return NextResponse.json({ error: 'Internal error' }, { status: 500 }); + } + + return NextResponse.json({ ok: true }); +} +``` + +- [ ] **Step 2: Add data directory to .gitignore** + +Check if `/Users/blove/repos/stream-resource/.gitignore` or `apps/website/.gitignore` exists and add the signups file path. 
The PDF should be committed but signup data should not: + +```bash +echo "apps/website/data/" >> .gitignore +``` + +- [ ] **Step 3: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +- [ ] **Step 4: Test the route manually** + +Start dev server, then in a separate terminal: + +```bash +curl -X POST http://localhost:3000/api/whitepaper-signup \ + -H "Content-Type: application/json" \ + -d '{"name":"Test User","email":"test@example.com"}' +``` + +Expected response: `{"ok":true}` + +Check file was created: +```bash +cat apps/website/data/whitepaper-signups.ndjson +``` + +Expected: `{"name":"Test User","email":"test@example.com","ts":"2026-..."}` + +- [ ] **Step 5: Commit** + +```bash +git add apps/website/src/app/api/whitepaper-signup/route.ts .gitignore +git commit -m "feat: add whitepaper signup API route with NDJSON persistence" +``` + +--- + +### Task 4: Create WhitePaperSection component + +**Files:** +- Create: `apps/website/src/components/landing/WhitePaperSection.tsx` + +- [ ] **Step 1: Create the file** + +```tsx +'use client'; +import { useState } from 'react'; +import { motion } from 'framer-motion'; +import { tokens } from '@cacheplane/design-tokens'; + +type FormState = 'idle' | 'submitting' | 'done' | 'error'; + +export function WhitePaperSection() { + const [name, setName] = useState(''); + const [email, setEmail] = useState(''); + const [formState, setFormState] = useState('idle'); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + if (!email) return; + setFormState('submitting'); + try { + const res = await fetch('/api/whitepaper-signup', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ name, email }), + }); + if (!res.ok) throw new Error('Server error'); + setFormState('done'); + } catch { + setFormState('error'); + } + }; + + const inputStyle: React.CSSProperties = { + width: '100%', + background: 
'rgba(255,255,255,0.7)', + border: `1px solid ${tokens.glass.border}`, + borderRadius: 10, + padding: '10px 14px', + fontSize: '0.88rem', + color: tokens.colors.textPrimary, + fontFamily: 'Inter, sans-serif', + outline: 'none', + marginBottom: 10, + backdropFilter: `blur(${tokens.glass.blur})`, + }; + + return ( +
    + + {/* Left — download CTA */} + + + {/* Right — optional form */} +
    +

    + Optional — Get notified of updates +

    + + {formState === 'done' ? ( +
    + ✓ Thanks! We'll reach out when the guide is updated. +
    + ) : ( +
    + setName(e.target.value)} + disabled={formState === 'submitting'} + /> + setEmail(e.target.value)} + required + disabled={formState === 'submitting'} + /> + {formState === 'error' && ( +

    + Something went wrong — please try again. +

    + )} + +
    + )} +
    + +
    + ); +} +``` + +- [ ] **Step 2: Build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +- [ ] **Step 3: Visual verification** + +Add `` to page.tsx temporarily. Confirm: +1. Two-column layout renders: download CTA on left, form on right +2. "↓ Download PDF" button is a real link to `/whitepaper.pdf` +3. Submitting form with valid email shows success state +4. Submitting without email shows required validation + +- [ ] **Step 4: Commit** + +```bash +git add apps/website/src/components/landing/WhitePaperSection.tsx +git commit -m "feat: add WhitePaperSection with free download and optional lead-gen form" +``` + +--- + +### Task 5: Wire WhitePaperSection into page.tsx + +**Files:** +- Modify: `apps/website/src/app/page.tsx` + +- [ ] **Step 1: Add import** + +Add to the imports at the top of `apps/website/src/app/page.tsx`: + +```tsx +import { WhitePaperSection } from '../components/landing/WhitePaperSection'; +``` + +- [ ] **Step 2: Insert section** + +Placement: after ``, before ``. + +If `FairComparisonSection` hasn't been added yet (this plan runs independently of the narrative redesign plan), place it after ``, before ``. + +```tsx +{/* White paper free download */} + +``` + +- [ ] **Step 3: Final build check** + +```bash +cd apps/website && ../../node_modules/.bin/next build --no-lint 2>&1 | tail -20 +``` + +Expected: successful build + +- [ ] **Step 4: End-to-end verification** + +```bash +cd apps/website && ../../node_modules/.bin/next dev +``` + +1. Navigate to http://localhost:3000, scroll to WhitePaperSection +2. Click "↓ Download PDF" — verify browser downloads the PDF +3. Fill in email, click "Notify me" — verify success state appears +4. 
Check `apps/website/data/whitepaper-signups.ndjson` — verify entry was written + +- [ ] **Step 5: Commit** + +```bash +git add apps/website/src/app/page.tsx +git commit -m "feat: add WhitePaperSection to landing page" +``` diff --git a/docs/superpowers/plans/2026-04-06-rebrand-angular-stream-resource.md b/docs/superpowers/plans/2026-04-06-rebrand-angular-stream-resource.md index 8b387afb7..a3ab45f23 100644 --- a/docs/superpowers/plans/2026-04-06-rebrand-angular-stream-resource.md +++ b/docs/superpowers/plans/2026-04-06-rebrand-angular-stream-resource.md @@ -1,14 +1,14 @@ -# Rebrand to "Angular Agent Framework" Implementation Plan +# Rebrand to "Angular Stream Resource" Implementation Plan > **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. -**Goal:** Replace all brand/product-name occurrences of "Agent" and "angular" with "Angular Agent Framework" across marketing, docs, and licensing — without touching code, npm packages, domains, or infrastructure. +**Goal:** Replace all brand/product-name occurrences of "StreamResource" and "stream-resource" with "Angular Stream Resource" across marketing, docs, and licensing — without touching code, npm packages, domains, or infrastructure. -**Architecture:** Pure text replacement across ~15 files. No code changes, no build system changes, no infrastructure changes. The Angular library `agent()` function, all types, imports, file names, domains, and npm packages remain exactly as-is. +**Architecture:** Pure text replacement across ~15 files. No code changes, no build system changes, no infrastructure changes. The Angular library `streamResource()` function, all types, imports, file names, domains, and npm packages remain exactly as-is. 
**Tech Stack:** Markdown, TSX (Next.js website components), plain text (license) -**Decision Rule:** If "Agent" or "angular" appears inside a code block, import statement, URL, file path, package.json `name` field, or refers to a function/type/interface — do NOT change it. Only change it when it's used as a product name in prose, headings, titles, or marketing copy. +**Decision Rule:** If "StreamResource" or "stream-resource" appears inside a code block, import statement, URL, file path, package.json `name` field, or refers to a function/type/interface — do NOT change it. Only change it when it's used as a product name in prose, headings, titles, or marketing copy. --- @@ -23,13 +23,13 @@ In `COMMERCIAL.md`, line 6, change: ``` -- **Agent Commercial License** — required for commercial use (see [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL)) +- **StreamResource Commercial License** — required for commercial use (see [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL)) ``` to: ``` -- **Angular Agent Framework Commercial License** — required for commercial use (see [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL)) +- **Angular Stream Resource Commercial License** — required for commercial use (see [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL)) ``` - [ ] **Step 2: Update README.md license section** @@ -37,22 +37,22 @@ to: In `README.md`, line 151, change: ``` -- **Agent Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). +- **StreamResource Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). ``` to: ``` -- **Angular Agent Framework Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). 
+- **Angular Stream Resource Commercial License** — required for any for-profit or revenue-generating use. See [`LICENSE-COMMERCIAL`](./LICENSE-COMMERCIAL) and [`COMMERCIAL.md`](./COMMERCIAL.md). ``` -Note: `LICENSE-COMMERCIAL` itself does NOT use "Agent" as a product name (only `@cacheplane/angular` as a package name), so it requires no changes. +Note: `LICENSE-COMMERCIAL` itself does NOT use "StreamResource" as a product name (only `@cacheplane/stream-resource` as a package name), so it requires no changes. - [ ] **Step 3: Commit** ```bash git add COMMERCIAL.md README.md -git commit -m "docs: rebrand license references to Angular Agent Framework" +git commit -m "docs: rebrand license references to Angular Stream Resource" ``` --- @@ -67,13 +67,13 @@ git commit -m "docs: rebrand license references to Angular Agent Framework" In `README.md`, line 4, change: ``` - alt="angular — The Enterprise Streaming Resource for LangChain and Angular" + alt="stream-resource — The Enterprise Streaming Resource for LangChain and Angular" ``` to: ``` - alt="Angular Agent Framework — The Enterprise Streaming Resource for LangChain and Angular" + alt="Angular Stream Resource — The Enterprise Streaming Resource for LangChain and Angular" ``` - [ ] **Step 2: Update README architecture alt text** @@ -81,13 +81,13 @@ to: In `README.md`, line 114, change: ``` - alt="angular architecture: Angular Component → agent() → StreamManager Bridge → LangGraph Platform, with signals returned reactively" + alt="stream-resource architecture: Angular Component → streamResource() → StreamManager Bridge → LangGraph Platform, with signals returned reactively" ``` to: ``` - alt="Angular Agent Framework architecture: Angular Component → agent() → StreamManager Bridge → LangGraph Platform, with signals returned reactively" + alt="Angular Stream Resource architecture: Angular Component → streamResource() → StreamManager Bridge → LangGraph Platform, with signals returned reactively" ``` - [ ] **Step 3: 
Commit** @@ -109,20 +109,20 @@ git commit -m "docs: rebrand README hero and architecture alt text" In `apps/website/src/app/layout.tsx`, line 25, change: ```typescript - title: 'Agent — LangChain Streaming for Angular', + title: 'StreamResource — LangChain Streaming for Angular', ``` to: ```typescript - title: 'Angular Agent Framework — LangChain Streaming for Angular', + title: 'Angular Stream Resource — LangChain Streaming for Angular', ``` - [ ] **Step 2: Commit** ```bash git add apps/website/src/app/layout.tsx -git commit -m "docs: rebrand website page title to Angular Agent Framework" +git commit -m "docs: rebrand website page title to Angular Stream Resource" ``` --- @@ -137,20 +137,20 @@ git commit -m "docs: rebrand website page title to Angular Agent Framework" In `apps/website/src/components/shared/Nav.tsx`, line 52, change: ```tsx - Agent + StreamResource ``` to: ```tsx - Angular Agent Framework + Angular Stream Resource ``` - [ ] **Step 2: Commit** ```bash git add apps/website/src/components/shared/Nav.tsx -git commit -m "docs: rebrand nav header to Angular Agent Framework" +git commit -m "docs: rebrand nav header to Angular Stream Resource" ``` --- @@ -165,13 +165,13 @@ git commit -m "docs: rebrand nav header to Angular Agent Framework" In `apps/website/src/components/shared/Footer.tsx`, line 43, change: ```tsx -

    Agent

    +

    StreamResource

    ``` to: ```tsx -

    Angular Agent Framework

    +

    Angular Stream Resource

    ``` - [ ] **Step 2: Update Footer copyright** @@ -179,20 +179,20 @@ to: In `apps/website/src/components/shared/Footer.tsx`, line 130, change: ```tsx - © {new Date().getFullYear()} Agent. All rights reserved. + © {new Date().getFullYear()} StreamResource. All rights reserved. ``` to: ```tsx - © {new Date().getFullYear()} Angular Agent Framework. All rights reserved. + © {new Date().getFullYear()} Angular Stream Resource. All rights reserved. ``` - [ ] **Step 3: Commit** ```bash git add apps/website/src/components/shared/Footer.tsx -git commit -m "docs: rebrand footer to Angular Agent Framework" +git commit -m "docs: rebrand footer to Angular Stream Resource" ``` --- @@ -208,13 +208,13 @@ git commit -m "docs: rebrand footer to Angular Agent Framework" In `apps/website/src/components/landing/ProblemSection.tsx`, line 288, change: ```tsx - Agent closes the gap + StreamResource closes the gap ``` to: ```tsx - Angular Agent Framework closes the gap + Angular Stream Resource closes the gap ``` - [ ] **Step 2: Update GenerativeUIFrame aria-label** @@ -222,20 +222,20 @@ to: In `apps/website/src/components/landing/GenerativeUIFrame.tsx`, line 30, change: ```tsx - aria-label="Animated demo of angular orchestrating a multi-step agent UI" + aria-label="Animated demo of stream-resource orchestrating a multi-step agent UI" ``` to: ```tsx - aria-label="Animated demo of Angular Agent Framework orchestrating a multi-step agent UI" + aria-label="Animated demo of Angular Stream Resource orchestrating a multi-step agent UI" ``` - [ ] **Step 3: Commit** ```bash git add apps/website/src/components/landing/ProblemSection.tsx apps/website/src/components/landing/GenerativeUIFrame.tsx -git commit -m "docs: rebrand landing page components to Angular Agent Framework" +git commit -m "docs: rebrand landing page components to Angular Stream Resource" ``` --- @@ -250,13 +250,13 @@ git commit -m "docs: rebrand landing page components to Angular Agent Framework" In 
`apps/website/src/app/llms.txt/route.ts`, line 8, change: ```typescript - `# angular v${version}`, + `# stream-resource v${version}`, ``` to: ```typescript - `# Angular Agent Framework v${version}`, + `# Angular Stream Resource v${version}`, ``` - [ ] **Step 2: Update llms.txt description** @@ -264,20 +264,20 @@ to: In `apps/website/src/app/llms.txt/route.ts`, line 10, change: ```typescript - "Angular streaming library for LangChain/LangGraph. Provides agent() — full parity with React's useStream() hook, built on Angular Signals.", + "Angular streaming library for LangChain/LangGraph. Provides streamResource() — full parity with React's useStream() hook, built on Angular Signals.", ``` to: ```typescript - "Angular Agent Framework — the enterprise streaming library for LangChain/LangGraph. Provides agent() — full parity with React's useStream() hook, built on Angular Signals.", + "Angular Stream Resource — the enterprise streaming library for LangChain/LangGraph. Provides streamResource() — full parity with React's useStream() hook, built on Angular Signals.", ``` - [ ] **Step 3: Commit** ```bash git add apps/website/src/app/llms.txt/route.ts -git commit -m "docs: rebrand llms.txt to Angular Agent Framework" +git commit -m "docs: rebrand llms.txt to Angular Stream Resource" ``` --- @@ -286,7 +286,7 @@ git commit -m "docs: rebrand llms.txt to Angular Agent Framework" **Files:** - Modify: `docs/limitations.md:1` -- Modify: `docs/superpowers/specs/2026-03-17-angular-design.md:1,5,11` +- Modify: `docs/superpowers/specs/2026-03-17-stream-resource-design.md:1,5,11` - Modify: `docs/superpowers/specs/2026-03-18-agentic-docs-design.md:1,11` - Modify: `docs/superpowers/specs/2026-03-18-website-branding-design.md:1,11` - Modify: `docs/superpowers/specs/2026-04-04-expanded-introduction-design.md:18` @@ -296,35 +296,35 @@ git commit -m "docs: rebrand llms.txt to Angular Agent Framework" Line 1, change: ```markdown -# Agent — Angular Limitations vs React useStream() +# 
StreamResource — Angular Limitations vs React useStream() ``` to: ```markdown -# Angular Agent Framework — Limitations vs React useStream() +# Angular Stream Resource — Limitations vs React useStream() ``` -- [ ] **Step 2: Update angular-design.md brand references** +- [ ] **Step 2: Update stream-resource-design.md brand references** -In `docs/superpowers/specs/2026-03-17-angular-design.md`: +In `docs/superpowers/specs/2026-03-17-stream-resource-design.md`: Line 1, change: ```markdown -# Agent — Design Specification +# StreamResource — Design Specification ``` to: ```markdown -# Angular Agent Framework — Design Specification +# Angular Stream Resource — Design Specification ``` Line 11, change: ```markdown -Agent is an Angular 20+ library that provides `agent()` — a full-parity implementation +StreamResource is an Angular 20+ library that provides `streamResource()` — a full-parity implementation ``` to: ```markdown -Angular Agent Framework is an Angular 20+ library that provides `agent()` — a full-parity implementation +Angular Stream Resource is an Angular 20+ library that provides `streamResource()` — a full-parity implementation ``` - [ ] **Step 3: Update agentic-docs-design.md brand references** @@ -333,20 +333,20 @@ In `docs/superpowers/specs/2026-03-18-agentic-docs-design.md`: Line 1, change: ```markdown -# Agent — Agentic Docs & Hero Redesign Specification +# StreamResource — Agentic Docs & Hero Redesign Specification ``` to: ```markdown -# Angular Agent Framework — Agentic Docs & Hero Redesign Specification +# Angular Stream Resource — Agentic Docs & Hero Redesign Specification ``` Line 11, change: ```markdown -Two related changes to the Agent website: +Two related changes to the StreamResource website: ``` to: ```markdown -Two related changes to the Angular Agent Framework website: +Two related changes to the Angular Stream Resource website: ``` - [ ] **Step 4: Update website-branding-design.md brand references** @@ -355,20 +355,20 @@ In 
`docs/superpowers/specs/2026-03-18-website-branding-design.md`: Line 1, change: ```markdown -# Agent Website — Brand Refresh Design Specification +# StreamResource Website — Brand Refresh Design Specification ``` to: ```markdown -# Angular Agent Framework Website — Brand Refresh Design Specification +# Angular Stream Resource Website — Brand Refresh Design Specification ``` Line 11, change: ```markdown -Update the Agent website design from its original warm "dark luxury" aesthetic +Update the StreamResource website design from its original warm "dark luxury" aesthetic ``` to: ```markdown -Update the Angular Agent Framework website design from its original warm "dark luxury" aesthetic +Update the Angular Stream Resource website design from its original warm "dark luxury" aesthetic ``` - [ ] **Step 5: Update expanded-introduction-design.md brand reference** @@ -377,18 +377,18 @@ In `docs/superpowers/specs/2026-04-04-expanded-introduction-design.md`: Line 18, change: ```markdown -### Section 1: What is Agent? +### Section 1: What is StreamResource? ``` to: ```markdown -### Section 1: What is Angular Agent Framework? +### Section 1: What is Angular Stream Resource? ``` - [ ] **Step 6: Commit** ```bash -git add docs/limitations.md docs/superpowers/specs/2026-03-17-angular-design.md docs/superpowers/specs/2026-03-18-agentic-docs-design.md docs/superpowers/specs/2026-03-18-website-branding-design.md docs/superpowers/specs/2026-04-04-expanded-introduction-design.md -git commit -m "docs: rebrand spec and doc titles to Angular Agent Framework" +git add docs/limitations.md docs/superpowers/specs/2026-03-17-stream-resource-design.md docs/superpowers/specs/2026-03-18-agentic-docs-design.md docs/superpowers/specs/2026-03-18-website-branding-design.md docs/superpowers/specs/2026-04-04-expanded-introduction-design.md +git commit -m "docs: rebrand spec and doc titles to Angular Stream Resource" ``` --- @@ -406,13 +406,13 @@ Both files have identical content. 
They serve as LLM context files published to In `apps/website/public/AGENTS.md`, line 1, change: ```markdown -# angular v0.0.1 +# stream-resource v0.0.1 ``` to: ```markdown -# Angular Agent Framework v0.0.1 +# Angular Stream Resource v0.0.1 ``` - [ ] **Step 2: Update CLAUDE.md header** @@ -420,22 +420,22 @@ to: In `apps/website/public/CLAUDE.md`, line 1, change: ```markdown -# angular v0.0.1 +# stream-resource v0.0.1 ``` to: ```markdown -# Angular Agent Framework v0.0.1 +# Angular Stream Resource v0.0.1 ``` -Note: All other references in these files (`agent()`, `provideAgent`, `import ... from 'angular'`, `@angular/mcp`, `cacheplane.ai`) are code/infrastructure references and should NOT be changed. +Note: All other references in these files (`streamResource()`, `provideStreamResource`, `import ... from 'stream-resource'`, `@stream-resource/mcp`, `cacheplane.ai`) are code/infrastructure references and should NOT be changed. - [ ] **Step 3: Commit** ```bash git add apps/website/public/AGENTS.md apps/website/public/CLAUDE.md -git commit -m "docs: rebrand LLM context file headers to Angular Agent Framework" +git commit -m "docs: rebrand LLM context file headers to Angular Stream Resource" ``` --- @@ -443,26 +443,26 @@ git commit -m "docs: rebrand LLM context file headers to Angular Agent Framework ### Task 10: Remaining Spec/Plan Files with Brand References **Files:** -- Search all files in `docs/superpowers/specs/` and `docs/superpowers/plans/` for "Agent" used as a product name in prose (not in code blocks) +- Search all files in `docs/superpowers/specs/` and `docs/superpowers/plans/` for "StreamResource" used as a product name in prose (not in code blocks) - [ ] **Step 1: Grep for remaining brand references** Run: ```bash -grep -rn "Agent" docs/superpowers/specs/ docs/superpowers/plans/ --include="*.md" | grep -v "agent\|AgentRef\|AgentOptions\|AgentConfig\|AgentTransport\|MockAgent\|provideAgent\|addAgent\|handleAddAgent\|STREAM_RESOURCE\|rebrand-angular" +grep -rn 
"StreamResource" docs/superpowers/specs/ docs/superpowers/plans/ --include="*.md" | grep -v "streamResource\|StreamResourceRef\|StreamResourceOptions\|StreamResourceConfig\|StreamResourceTransport\|MockStreamResource\|provideStreamResource\|addStreamResource\|handleAddStreamResource\|STREAM_RESOURCE\|rebrand-angular" ``` This filters out all code references, leaving only brand usage. - [ ] **Step 2: Update any remaining brand references found** -For each match from Step 1, apply the same rule: if "Agent" is used as a product name in prose/headings, change to "Angular Agent Framework". If it's inside a code block or refers to a function/type, leave it. +For each match from Step 1, apply the same rule: if "StreamResource" is used as a product name in prose/headings, change to "Angular Stream Resource". If it's inside a code block or refers to a function/type, leave it. - [ ] **Step 3: Commit** ```bash git add docs/ -git commit -m "docs: rebrand remaining spec references to Angular Agent Framework" +git commit -m "docs: rebrand remaining spec references to Angular Stream Resource" ``` --- @@ -482,7 +482,7 @@ Expected: Build succeeds with no errors. Run: ```bash -npx nx build angular +npx nx build stream-resource ``` Expected: Build succeeds (no changes were made to library code). @@ -491,16 +491,16 @@ Expected: Build succeeds (no changes were made to library code). Run: ```bash -grep -rn "Agent" --include="*.md" --include="*.tsx" --include="*.ts" --include="*.html" . | grep -v node_modules | grep -v dist | grep -v ".angular" | grep -v "agent\|AgentRef\|AgentOptions\|AgentConfig\|AgentTransport\|MockAgent\|provideAgent\|addAgent\|handleAddAgent\|STREAM_RESOURCE_CONFIG\|rebrand-angular" +grep -rn "StreamResource" --include="*.md" --include="*.tsx" --include="*.ts" --include="*.html" . 
| grep -v node_modules | grep -v dist | grep -v ".angular" | grep -v "streamResource\|StreamResourceRef\|StreamResourceOptions\|StreamResourceConfig\|StreamResourceTransport\|MockStreamResource\|provideStreamResource\|addStreamResource\|handleAddStreamResource\|STREAM_RESOURCE_CONFIG\|rebrand-angular" ``` -Review each match. Any brand usage of "Agent" remaining should be updated. +Review each match. Any brand usage of "StreamResource" remaining should be updated. -- [ ] **Step 4: Final grep for "angular" as brand** +- [ ] **Step 4: Final grep for "stream-resource" as brand** Run: ```bash -grep -rn '"angular' --include="*.md" --include="*.tsx" --include="*.ts" . | grep -v node_modules | grep -v dist | grep -v "package\|import\|from\|require\|cacheplane\|/libs/\|/apps/\|tsconfig\|project.json\|\.dev" +grep -rn '"stream-resource' --include="*.md" --include="*.tsx" --include="*.ts" . | grep -v node_modules | grep -v dist | grep -v "package\|import\|from\|require\|cacheplane\|/libs/\|/apps/\|tsconfig\|project.json\|\.dev" ``` Review each match. Any brand usage (not package name, not URL, not file path) should be updated. diff --git a/docs/superpowers/specs/2026-03-21-agent-first-docs-design.md b/docs/superpowers/specs/2026-03-21-agent-first-docs-design.md new file mode 100644 index 000000000..9e6bbf191 --- /dev/null +++ b/docs/superpowers/specs/2026-03-21-agent-first-docs-design.md @@ -0,0 +1,258 @@ +# Agent-First Developer Docs Design + +**Date:** 2026-03-21 +**Status:** Proposed +**Scope:** End-to-end documentation content system for StreamResource, cockpit examples, Deep Agents, and LangGraph, optimized for software developers using agents. + +--- + +## Goal + +Create best-in-class developer documentation that is optimized for software developers working with agents. 
+ +The docs must: + +- cover the full product coherently end to end +- be generated first from code and examples as the source of truth +- materialize into editable Markdown +- remain long-lived and contributor-friendly +- provide excellent first-pass architecture and walkthrough explanations +- support both frontend and backend code explanation through the UI +- report stale docs without gating updates + +--- + +## Core Principle + +Code and examples are the absolute source of truth. + +Documentation is generated from: + +- code +- example manifests +- prompts +- tests +- commands +- runtime metadata + +The generated Markdown becomes the long-lived maintained artifact after the initial generation pass. + +Generator-owned metadata blocks and sections must be explicitly marked in the Markdown output so later tooling can compare source-of-truth expectations without blindly overwriting contributor edits. + +--- + +## Intended Reader + +The primary reader is: + +- a software developer using one or more agents to implement, modify, or verify a feature + +The docs should therefore optimize for: + +- exact goals +- architecture clarity +- file-level orientation +- copy-pasteable commands +- verification steps +- common failure modes +- prompts and implementation briefs +- frontend and backend visibility + +--- + +## Research Direction + +The system should learn from platforms such as: + +- MkDocs Material +- Mintlify +- Docusaurus +- Fern + +But the recommended direction is a custom Next.js-based system integrated with the existing website and cockpit, because: + +- the source of truth is custom code/example metadata, not only Markdown +- the cockpit and website must share metadata and navigation +- frontend and backend code views are first-class +- agent-first generated explanations and verification flows are required + +--- + +## Documentation Model + +The first version uses a **fully generated seed** model: + +1. Extract from code/examples/tests/prompts/manifests +2. 
Synthesize strong first-pass docs +3. Write Markdown into the docs tree +4. Keep Markdown editable over time +5. Use drift reporting to suggest updates later + +This is intentionally not a continuously regenerated docs system. + +--- + +## Source Buckets + +Each generated page should draw from five source buckets: + +1. Manifest data +- product +- topic +- page id +- language +- maturity +- routing +- fallback behavior + +2. Example/module artifacts +- source files +- prompts +- configs +- commands +- runtime class + +3. Tests and verification +- smoke targets +- integration targets +- expected outputs +- failure boundaries + +4. Code structure +- frontend files +- backend files +- entrypoints +- dependency relationships + +5. Narrative synthesis +- generated explanations of: + - what the example does + - why it exists + - how it is structured + - how to build it + - how to verify it + +For the initial pass, narrative synthesis is allowed to come from: + +- structured heuristics derived from manifests, prompts, tests, commands, and code maps +- generator-authored explanatory text based on those inputs + +The generated narrative is intended to be strong enough for the first baseline, but it is expected that contributors will improve the Markdown afterward. + +--- + +## Page Model + +The standard generated page shape should be: + +1. Overview +2. Why this example exists +3. Architecture +4. File map +5. Build steps +6. Prompts +7. Frontend code +8. Backend code +9. Verification +10. Common failure modes +11. Related examples + +This page model should apply across: + +- StreamResource library docs +- cockpit capability docs +- Deep Agents docs +- LangGraph docs + +Not every page needs every section rendered identically, but the model should be consistent enough for agents to rely on it. 
+ +--- + +## Content Scope + +This docs program covers all documentation content, not just the new cockpit pages: + +- StreamResource library docs +- website docs +- cockpit-linked capability docs +- Deep Agents docs +- LangGraph docs + +The docs experience should become coherent end to end. + +The first generation pass should prioritize areas that already have useful structured source artifacts: + +- cockpit capability docs +- Deep Agents and LangGraph capability docs +- StreamResource library docs where code/examples/prompts/tests already provide enough source material + +Areas with weaker source metadata may receive thinner generated pages in v1, but they still need to fit the same page model. + +--- + +## Markdown Output + +All generated docs should be written to Markdown so contributors can maintain them over time. + +Markdown is the long-lived editable artifact. + +The system should preserve: + +- stable frontmatter or metadata blocks +- structured references back to source-of-truth artifacts +- enough generated structure that later edits do not require a full rewrite + +For the initial system, generated Markdown lives in the same docs tree the website reads. Generator-owned sections must be clearly delimited so drift reporting can stay advisory and ordinary maintenance does not require full regeneration. + +--- + +## Drift Reporting + +Post-generation drift handling is advisory only. 
+ +The system should: + +- detect stale docs relative to code/example metadata where practical +- emit warnings +- suggest a prompt or update brief for the contributor/agent + +The system should not: + +- fail CI solely because docs diverged after generation +- force a full regeneration pass for ordinary doc maintenance + +--- + +## UX Expectations + +The docs experience should be excellent for agent-assisted development: + +- architecture explanations should be strong enough to orient an agent quickly +- build steps should be explicit +- verification should be concrete +- frontend and backend code should be surfaced clearly +- prompts should be available and copyable +- related examples should help navigation through the matrix + +--- + +## Non-Goals + +This spec does not define: + +- exact cockpit implementation details +- exact website UI styling +- final CI workflow wiring +- TypeScript parity rollout policy beyond existing approved cockpit policies + +--- + +## Success Criteria + +This spec is successful when: + +- docs can be generated once into strong Markdown from source-of-truth artifacts +- the generated output is good enough to serve as the initial high-quality docs baseline +- contributors can iterate on Markdown directly afterward +- stale docs are reported without becoming a hard gate +- the docs are genuinely useful for software developers using agents diff --git a/docs/superpowers/specs/2026-03-22-cockpit-ui-refresh-design.md b/docs/superpowers/specs/2026-03-22-cockpit-ui-refresh-design.md new file mode 100644 index 000000000..777c1453e --- /dev/null +++ b/docs/superpowers/specs/2026-03-22-cockpit-ui-refresh-design.md @@ -0,0 +1,241 @@ +# Cockpit UI Refresh Design + +**Date:** 2026-03-22 +**Status:** Approved in-session +**Scope:** Visual and interaction design for the cockpit shell experience on top of the existing manifest-driven architecture. 
+ +--- + +## Goal + +Turn the cockpit into a simple, modern React application that still functions as a marketing surface, while keeping the primary experience centered on runnable examples and implementation visibility. + +The shell should feel like a polished product, not an internal tool. It should inherit the visual language of the website while reducing visual noise and keeping attention on the selected example. + +--- + +## Product Direction + +The cockpit home is a workspace-first product surface. + +It is not a separate landing page and it is not a dense engineering dashboard. Users should land directly inside the application shell, with: + +- a stable left rail for language and capability selection +- a clear example title and context header +- a default `Run` mode that opens the live surface immediately +- alternate `Code` and `Docs` modes that meaningfully recompose the main workspace +- a prompt asset interaction that is close at hand but not promoted to a top-level mode + +The shell should still sell the product, but it should do so through restraint, clarity, and confidence rather than large hero copy blocks. + +--- + +## Visual Language + +The cockpit should align with the website's current design system: + +- dark editorial background +- cool blue accent +- serif headline moments paired with restrained sans-serif UI text +- monospaced labels and code surfaces + +The cockpit should be visually flatter and calmer than the current website landing page: + +- fewer nested cards +- fewer stacked rounded boxes +- larger continuous surfaces +- more whitespace +- clearer alignment and stronger typographic hierarchy + +The end result should feel like the website's product interior, not like a second unrelated app. 
+ +--- + +## Shell Layout + +### Desktop + +The cockpit is a full-height application shell with two persistent zones: + +- left rail +- main workspace + +The left rail remains visible on desktop and contains: + +- product branding +- custom language picker +- grouped capability navigation +- a short supporting note + +The left rail groups entries under small bold group headers such as: + +- `Deep Agents` +- `LangGraph` + +The main workspace contains: + +- contextual header +- primary mode switch +- mode-specific content surface + +### Mobile / Narrow Screens + +The layout should remain responsive, but the interaction model stays the same: + +- left rail can stack above content or become collapsible later if needed +- prompt viewer becomes a bottom-anchored or narrow slide-over treatment +- mode switching remains visible and simple + +This spec does not require a mobile drawer redesign yet, only a responsive implementation of the same shell. + +--- + +## Language Control + +Language selection should live in the left rail and use a custom UI component, not the browser's native `select`. + +Requirements: + +- visually aligned with the rest of the shell +- supports `Python` and `TypeScript` +- reflects current active language clearly +- supports future richer states such as unavailable parity or fallback messaging + +This is both a functional control and part of the product polish. + +--- + +## Navigation Model + +Capability navigation should stay grouped and readable. + +Requirements: + +- grouped by product area +- selected item clearly highlighted +- quiet inactive state +- no excessive badge clutter in the first version + +The navigation should communicate structure without competing with the main content area. + +--- + +## Primary Modes + +The shell uses exactly three primary modes: + +- `Run` +- `Code` +- `Docs` + +These are not superficial tabs. Each mode should recompose the main workspace to match the user's intent. + +### Run + +`Run` is the default mode. 
+ +It prioritizes: + +- the live example surface +- runtime feedback or logs +- a compact supporting rail with relevant context + +The shell should make it obvious that users can interact with a working reference immediately. + +### Code + +`Code` is a focused implementation-reading mode. + +Requirements: + +- no left-side file column inside the code surface +- code files represented as tabs across the top of the code interface +- only one active file open at a time +- tabs represent the relevant frontend and backend files for the selected example +- editor header still shows the full file path or context + +This should feel closer to an IDE than to a docs sidebar. + +### Docs + +`Docs` is a clean implementation guide. + +It should feel like a documentation page, not a dashboard: + +- strong title and intro +- explanatory narrative +- inline code samples where useful +- prompt-related copy affordances +- clear threading between run behavior, code, and prompts + +The docs mode should help the user understand the example as one system. + +--- + +## Prompt Interaction + +Prompt assets remain important, but they are not a fourth primary mode. + +The shell should expose a clear `Open prompt assets` call to action from the header. + +Requirements: + +- opens a responsive slide-over panel +- keeps the main shell visible underneath +- supports switching between prompt assets +- supports copy-oriented actions +- can later support deeper links into code mode or docs mode + +This makes prompts feel intentional and accessible without overloading the top-level information architecture. + +--- + +## Content Model In The UI + +The cockpit should continue using manifest-driven content selection. + +The UI should surface: + +- current product / topic / language context +- relevant frontend and backend files +- prompt assets +- docs bundle content + +The shell should not hardcode per-example page logic for layout decisions beyond the general mode system. 
+ +--- + +## Interaction Principles + +- Default to `Run` +- Keep navigation stable +- Let modes recompose the workspace +- Keep prompt access secondary but obvious +- Prefer fewer, larger surfaces over nested boxes +- Keep code visibility concrete and implementation-oriented +- Treat frontend and backend as one example system + +--- + +## Non-Goals + +This design does not define: + +- new capability taxonomy +- new manifest identity rules +- capability-specific runtime controls +- matrix rollout changes +- mobile-specific information architecture beyond responsiveness + +--- + +## Success Criteria + +This design is successful when: + +- the cockpit looks aligned with the website but calmer and more product-like +- the shell feels simple and modern rather than dense +- users land in a runnable experience by default +- code mode feels like a real implementation-reading surface +- docs mode reads like a high-quality technical guide +- prompt assets are easy to access without becoming a top-level mode diff --git a/docs/superpowers/specs/2026-04-06-rebrand-angular-stream-resource.md b/docs/superpowers/specs/2026-04-06-rebrand-angular-stream-resource.md index cdb2da04b..65faa52a5 100644 --- a/docs/superpowers/specs/2026-04-06-rebrand-angular-stream-resource.md +++ b/docs/superpowers/specs/2026-04-06-rebrand-angular-stream-resource.md @@ -1,20 +1,20 @@ -# Rebrand to "Angular Agent Framework" — Design Specification +# Rebrand to "Angular Stream Resource" — Design Specification ## Overview -Rename the product brand from "Agent" / "angular" to **Angular Agent Framework** across all marketing, documentation, and licensing materials. The Angular library code, npm package, domain, and infrastructure remain unchanged. +Rename the product brand from "StreamResource" / "stream-resource" to **Angular Stream Resource** across all marketing, documentation, and licensing materials. The Angular library code, npm package, domain, and infrastructure remain unchanged. 
## Motivation -Angular 20 introduced `rxResource` with a `stream` property, which the official docs call a "Streaming Resource." The current product name "Agent" collides with this first-party terminology, creating confusion about whether the product is an official Angular package. Rebranding to "Angular Agent Framework" makes the name explicitly descriptive, positions it as the Angular-ecosystem standard for LLM streaming, and avoids ambiguity with the built-in API. +Angular 20 introduced `rxResource` with a `stream` property, which the official docs call a "Streaming Resource." The current product name "StreamResource" collides with this first-party terminology, creating confusion about whether the product is an official Angular package. Rebranding to "Angular Stream Resource" makes the name explicitly descriptive, positions it as the Angular-ecosystem standard for LLM streaming, and avoids ambiguity with the built-in API. ## Brand Identity | Attribute | Value | |-----------|-------| -| Full product name | Angular Agent Framework | +| Full product name | Angular Stream Resource | | Parent brand | Cacheplane | -| npm package | `@cacheplane/angular` (unchanged) | +| npm package | `@cacheplane/stream-resource` (unchanged) | | Domain | `cacheplane.ai` (unchanged) | | Tagline | "The Enterprise Streaming Resource for LangChain and Angular" (unchanged) | @@ -24,24 +24,24 @@ Angular 20 introduced `rxResource` with a `stream` property, which the official Brand-layer text only. All changes are string replacements in documentation and marketing files. -**Pattern:** Replace "Agent" (PascalCase, used as a product name) with "Angular Agent Framework" in prose contexts. Do NOT replace it in code contexts (function names, type names, imports, injection tokens). +**Pattern:** Replace "StreamResource" (PascalCase, used as a product name) with "Angular Stream Resource" in prose contexts. Do NOT replace it in code contexts (function names, type names, imports, injection tokens). 
#### 1. License Files -- `LICENSE-COMMERCIAL`: "Agent Commercial License" -> "Angular Agent Framework Commercial License" +- `LICENSE-COMMERCIAL`: "StreamResource Commercial License" -> "Angular Stream Resource Commercial License" - `COMMERCIAL.md`: All product name references #### 2. README -- Hero image alt text: "angular — The Enterprise Streaming Resource..." -> "Angular Agent Framework — The Enterprise Streaming Resource..." +- Hero image alt text: "stream-resource — The Enterprise Streaming Resource..." -> "Angular Stream Resource — The Enterprise Streaming Resource..." - Any prose references to the product name #### 3. Website (apps/website) -- Page `` and meta tags: "Agent — ..." -> "Angular Agent Framework — ..." +- Page `<title>` and meta tags: "StreamResource — ..." -> "Angular Stream Resource — ..." - Hero section headings - Navigation/footer brand references - Open Graph / social meta tags #### 4. Documentation Files (docs/) -- Document titles where "Agent" appears as a product name (e.g., "Agent — Angular Limitations..." -> "Angular Agent Framework — Angular Limitations...") +- Document titles where "StreamResource" appears as a product name (e.g., "StreamResource — Angular Limitations..." -> "Angular Stream Resource — Angular Limitations...") - Design specs that reference the product name in headings/descriptions #### 5. Cockpit Example Guides (cockpit/**/docs/guide.md) @@ -54,29 +54,29 @@ Brand-layer text only. 
All changes are string replacements in documentation and | Category | Examples | Reason | |----------|----------|--------| -| Function names | `agent()` | Code API identity | -| Type/interface names | `AgentRef`, `AgentOptions`, `AgentTransport`, `AgentConfig` | Code API identity | +| Function names | `streamResource()` | Code API identity | +| Type/interface names | `StreamResourceRef`, `StreamResourceOptions`, `StreamResourceTransport`, `StreamResourceConfig` | Code API identity | | Injection tokens | `STREAM_RESOURCE_CONFIG` | Code API identity | -| Provider functions | `provideAgent()` | Code API identity | -| Mock utilities | `MockAgentRef`, `createMockAgentRef()` | Code API identity | -| MCP tool functions | `addAgentTool`, `handleAddAgent()` | Code API identity | -| npm package name | `@cacheplane/angular` | Package identity | -| Import paths | `from '@cacheplane/angular'` | Package identity | -| File/directory names | `angular.fn.ts`, `/libs/angular/` | File system identity | -| Nx project names | `angular`, `angular-e2e` | Build system identity | -| TypeScript path aliases | `@cacheplane/angular` in tsconfig | Build system identity | +| Provider functions | `provideStreamResource()` | Code API identity | +| Mock utilities | `MockStreamResourceRef`, `createMockStreamResourceRef()` | Code API identity | +| MCP tool functions | `addStreamResourceTool`, `handleAddStreamResource()` | Code API identity | +| npm package name | `@cacheplane/stream-resource` | Package identity | +| Import paths | `from '@cacheplane/stream-resource'` | Package identity | +| File/directory names | `stream-resource.fn.ts`, `/libs/stream-resource/` | File system identity | +| Nx project names | `stream-resource`, `stream-resource-e2e` | Build system identity | +| TypeScript path aliases | `@cacheplane/stream-resource` in tsconfig | Build system identity | | Domain URLs | `cacheplane.ai`, `examples.cacheplane.ai` | Infrastructure identity | | CI/CD workflows | GitHub Actions references 
| Infrastructure identity | -| package.json `name` fields | `"name": "angular"` | Package identity | +| package.json `name` fields | `"name": "stream-resource"` | Package identity | ## Decision Rules for Ambiguous Cases -When encountering "Agent" or "angular" in a file, apply this test: +When encountering "StreamResource" or "stream-resource" in a file, apply this test: 1. **Is it inside a code block, import statement, or referring to an API?** -> Do NOT change. -2. **Is it a package.json `name` or `description` field that serves as an npm identifier?** -> Do NOT change `name`. DO change `description` if it uses "Agent" as a product name. -3. **Is it prose text describing the product to a human reader?** -> Change to "Angular Agent Framework". -4. **Is it a document title/heading naming the product?** -> Change to "Angular Agent Framework". +2. **Is it a package.json `name` or `description` field that serves as an npm identifier?** -> Do NOT change `name`. DO change `description` if it uses "StreamResource" as a product name. +3. **Is it prose text describing the product to a human reader?** -> Change to "Angular Stream Resource". +4. **Is it a document title/heading naming the product?** -> Change to "Angular Stream Resource". 5. **Is it a URL or domain reference?** -> Do NOT change. 
## File Inventory @@ -91,9 +91,9 @@ Estimated ~60 files need changes, distributed as: ## Testing Strategy - Verify the website builds successfully after changes (`nx build website`) -- Verify the Angular library builds successfully (should be unaffected, but confirm: `nx build angular`) +- Verify the Angular library builds successfully (should be unaffected, but confirm: `nx build stream-resource`) - Visual check of the website to confirm brand text renders correctly -- Grep for remaining "Agent" instances in prose contexts to confirm completeness +- Grep for remaining "StreamResource" instances in prose contexts to confirm completeness ## Rollout From 427bc73d8939c349cf0b61906606d29ab4cc1091 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 12:35:50 -0700 Subject: [PATCH 03/13] docs: add streaming generative UI design spec and research Design spec for auto-detecting and rendering generative UI content (json-render, A2UI, markdown) in the chat component. 
Includes: - Custom tree-based partial JSON parser library design - Content classifier service with progressive detection - Render lib element-level memoization for streaming efficiency - ParseTreeStore bridge with structural sharing - End-to-end spike plan for validation before implementation Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- .../research/partial-json-parsing-research.md | 128 +++++ ...26-04-08-streaming-generative-ui-design.md | 479 ++++++++++++++++++ 2 files changed, 607 insertions(+) create mode 100644 docs/research/partial-json-parsing-research.md create mode 100644 docs/superpowers/specs/2026-04-08-streaming-generative-ui-design.md diff --git a/docs/research/partial-json-parsing-research.md b/docs/research/partial-json-parsing-research.md new file mode 100644 index 000000000..9abe1c4d5 --- /dev/null +++ b/docs/research/partial-json-parsing-research.md @@ -0,0 +1,128 @@ +# Partial/Eager JSON Parsing for Streaming LLM Responses + +## Problem Statement + +When an LLM streams structured JSON (e.g., a UI spec), the response arrives token-by-token as incomplete JSON fragments. We need to: + +1. **Parse incomplete JSON into a usable object** as it streams — e.g., `{"root": "r1", "elements": {"r1": {"type": "te` should yield `{ root: "r1", elements: { r1: { type: "te" } } }` +2. **Maintain object reference integrity** so that reactive frameworks (Angular signals, React state) don't thrash — unchanged subtrees should keep the same object references between parses +3. 
**Detect content type early** — determine whether the streaming content is a JSON-Render spec, A2UI payload, or plain markdown + +## Existing Libraries + +### Tier 1: Purpose-Built for LLM Streaming + +#### `partial-json` (npm: `partial-json`) +- **Approach**: Full re-parse of accumulated string on every token +- **API**: `parse(incompleteString, Allow.STR | Allow.OBJ | ...)` — bitwise flags control what can be partial +- **Object stability**: None — returns fresh objects every call +- **Used by**: Vercel AI SDK implements the same concept internally (custom `parsePartialJson` in `@ai-sdk/provider-utils`) +- **Tradeoff**: Simple and correct. O(n²) over the full stream but negligible for typical LLM output (<50KB, sub-0.5ms per parse) + +#### `jsonriver` (npm: `jsonriver`) +- **Approach**: Incremental mutation — mutates the same object references throughout parsing +- **API**: Yields the same root object repeatedly as it fills in; strings only extend, arrays only append or mutate the final element, objects only add properties or mutate the most recent one +- **Object stability**: Yes — same object/array references across the entire stream. This is the only library that explicitly maintains object identity. +- **Author**: Google engineer, designed specifically for "streaming rendering of UIs defined by JSON generated by LLMs" +- **Performance**: ~5x slower than native `JSON.parse`, ~5-10x faster than stream-json. O(n) total. +- **Tradeoff**: Mutation-in-place conflicts with immutable state patterns. Angular signals that compare by reference may NOT detect inner mutations unless you explicitly notify. Would need an adapter layer. + +#### `jsonchunk` (npm: `jsonchunk`) +- **Approach**: Full re-parse, like `partial-json` +- **API**: `parse<T>(input)`, `createParser<T>()` (push-based), `parseStream<T>()` (async generator), `createStream<T>()` (TransformStream). Returns `DeepPartial<T>`. 
+- **Object stability**: None — fresh objects every snapshot +- **Size**: ~5KB, zero dependencies, TypeScript +- **Tradeoff**: Clean typed API with DeepPartial generics, but same O(n²) and no reference stability + +#### `llm-json-stream` (npm: `llm-json-stream`) +- **Approach**: Character-by-character SAX-style parser with property subscriptions +- **API**: `parser.getStringProperty('response')` yields string chunks; `listStream.onElement()` fires when array elements begin parsing +- **Object stability**: N/A — event-driven, doesn't build a tree +- **Tradeoff**: True O(n) incremental, but requires knowing the schema shape up front and wiring events to state manually + +#### `best-effort-json-parser` / `incomplete-json-parser` +- Similar to `partial-json` but less configurable. Mentioned for completeness. + +### Tier 2: General Streaming JSON (Not LLM-Specific) + +#### `@streamparser/json` +- SAX-style event-based parser. Emits tokens/events as it parses valid chunked JSON (e.g., streaming a 2GB file). Does **not** handle syntactically incomplete fragments — expects valid JSON arriving in pieces. + +#### `@discoveryjs/json-ext` +- `parseChunked()` for very large JSON (>100MB). Same constraint: expects valid JSON in chunks, not incomplete LLM fragments. + +### Summary Table + +| Library | Approach | Object Stability | Complexity | LLM-Specific | +|---------|----------|-----------------|------------|---------------| +| `partial-json` | Full re-parse | No (new objects) | O(n²) total | Yes | +| `jsonriver` | Incremental mutation | **Yes (same refs)** | O(n) total | Yes | +| `jsonchunk` | Full re-parse | No (new objects) | O(n²) total | Yes | +| `llm-json-stream` | SAX/event-based | N/A (events) | O(n) total | Yes | +| `@streamparser/json` | SAX/event-based | N/A (events) | O(n) total | No | + +## Object Reference Integrity Strategies + +The core challenge: reactive frameworks need stable object references for unchanged subtrees. Three approaches: + +### A. 
Re-parse + Diff + Structural Sharing + +1. Accumulate raw text, re-parse with `partial-json` on each token (~0.1ms for <10KB) +2. Diff the new parse result against the previous one using RFC 6902 JSON Patch (`fast-json-patch`, `rfc6902`, or Immer's `produceWithPatches`) +3. Apply only changed paths to the existing state object, preserving references for unchanged subtrees + +**Pros**: Simple parsing, well-understood diffing, works naturally with immutable state patterns +**Cons**: O(n²) parsing + O(tree) diffing per token. Two-pass (parse then diff). + +### B. Incremental Mutation + Explicit Signal Notification + +1. Use `jsonriver` for O(n) incremental parsing with stable references +2. Wrap in an adapter that tracks which paths mutated on each token +3. Explicitly notify Angular signals only for changed paths + +**Pros**: O(n) total, reference stability built-in, single-pass +**Cons**: Mutation model needs careful adaptation for Angular's immutable expectations. Adapter complexity. + +### C. SAX Events to Signal Paths + +1. Use `llm-json-stream` or `@streamparser/json` for event-driven parsing +2. Map each `value` event directly to a specific signal by JSON path +3. Fine-grained per-path signals — maximum reactivity precision + +**Pros**: O(n) total, most granular signal updates, no diffing needed +**Cons**: Requires schema knowledge up front. Complex wiring. Doesn't produce a usable intermediate object. 
+ +## How Existing Frameworks Handle This + +### Vercel AI SDK (`streamObject`) +- Custom `parsePartialJson` in `@ai-sdk/provider-utils` (no external dependency) +- Accumulates text, full re-parse on every chunk, yields fresh objects +- O(n²) — a rejected PR (#1883) showed 100x speedup with true streaming parser (3.79 Hz to 391 Hz on 5306 tokens) +- No object reference stability — relies on React's reconciliation + +### LangChain (`JsonOutputParser`) +- `partial=True`: yields entire partial object (re-parsed each time) +- `diff=True`: yields **RFC 6902 JSON Patch operations** describing the delta between successive parses — the closest production precedent for incremental patching + +### @json-render/core (Already in Our Dependencies) +- Has `compileSpecStream()`, `applySpecPatch()`, `parseSpecStreamLine()` — a SpecStream format based on RFC 6902 patches +- Designed for progressively building a `Spec` via patch operations +- Not currently wired up in the codebase + +## Performance Data + +| Output Size | Re-parse per Token | Notes | +|------------|-------------------|-------| +| <10KB | <0.5ms | Imperceptible. Re-parse is fine. | +| ~5KB (688 chunks) | 5.4ms at completion | Aha.io measured this; still acceptable | +| >50KB | 19-20ms+ | Becomes a bottleneck. Incremental parsing needed. | + +Aha.io's stateful parser was **388x faster** than full re-parse at scale (16.7s total down to 43ms). + +## Key Insight + +**No library solves "partial JSON parsing + object reference stability for reactive frameworks" as a complete package.** This is an ecosystem gap. 
The design space is: + +- `partial-json` gives you correctness and simplicity but no stability +- `jsonriver` gives you stability but via mutation +- The missing piece is an adapter that bridges one of these to Angular's signal model with structural sharing diff --git a/docs/superpowers/specs/2026-04-08-streaming-generative-ui-design.md b/docs/superpowers/specs/2026-04-08-streaming-generative-ui-design.md new file mode 100644 index 000000000..fd55ed14b --- /dev/null +++ b/docs/superpowers/specs/2026-04-08-streaming-generative-ui-design.md @@ -0,0 +1,479 @@ +# Streaming Generative UI — Design Spec + +**Date:** 2026-04-08 +**Status:** Draft — pending spike verification + +## Goal + +The `ChatComponent` should auto-detect and render generative UI content (json-render Spec, A2UI, or markdown) from AI message streams. This requires: + +1. A custom **partial JSON parser** library with a tree-based data structure that supports character-level streaming of property values +2. A **content classifier** that detects content type and routes to the appropriate parser +3. **Render lib optimizations** for efficient re-rendering when the spec changes incrementally +4. **Chat component integration** that wires classification, parsing, and rendering together + +## Architecture Overview + +``` +AI message content (growing token-by-token) + → ContentClassifier (per-message, stateful) + → Detection: JSON object? JSONL patches? A2UI delimiter? Markdown? 
+ → Routes to appropriate parser: + ├── PartialJsonParser (tree-based) → character-level prop streaming + ├── SpecStreamCompiler (@json-render/core) → JSONL patch accumulation + ├── A2UI accumulator (future) + └── Markdown accumulator (passthrough) + → ParseTreeStore bridges parse tree → Spec signal (structural sharing) + → ChatComponent template renders markdown and/or <chat-generative-ui> + → RenderSpecComponent renders with element-level memoization +``` + +## Part 1: Render Lib — Element-Level Memoization + +### Problem + +Every patch to the `spec` signal causes every `RenderElementComponent` to recompute its `element()`, `componentClass()`, `visible()`, and `resolvedInputs()` computeds — even when the specific element hasn't changed. The `immutableSetByPath()` function from `@json-render/core` provides structural sharing (unchanged elements keep the same object reference), but `RenderElementComponent` doesn't leverage this because Angular's `computed()` tracks the `spec()` signal as a dependency and re-evaluates on every reference change. 
+ +### Solution + +Use reference-equality checking on the `element()` computed so downstream computations skip when the element reference is unchanged: + +```ts +// Before +readonly element = computed(() => this.spec().elements[this.elementKey()]); + +// After — only propagates when the element reference actually changes +readonly element = computed( + () => this.spec().elements?.[this.elementKey()], + { equal: Object.is } // Angular 19+ computed equality +); +``` + +### Impact + +- A patch to `/elements/el-5` only triggers re-render of `el-5`, not `el-1` through `el-4` +- The `spec` signal can change on every token without cascading to all elements +- No API changes — consumers still pass a `Spec` and it renders +- The optimization is purely internal to the render lib + +### Files Changed + +- `libs/render/src/lib/render-element.component.ts` — add `equal` option to `element()` computed + +--- + +## Part 2: Partial JSON Parser Library + +### Purpose + +A standalone, framework-agnostic TypeScript library that parses streaming JSON character-by-character into a live parse tree. 
Unlike existing libraries (`partial-json`, `jsonriver`) that either re-parse the full string or use opaque mutation, this library: + +- Builds an explicit tree where each node has stable identity, type, value, and status +- Processes each character exactly once — O(n) total +- Emits fine-grained events (node created, value updated, node completed) +- Supports path-based observation for reactive integration +- Materializes to plain objects with structural sharing on demand + +### Node Types + +```ts +type JsonNodeType = 'object' | 'array' | 'string' | 'number' | 'boolean' | 'null'; + +interface JsonNode { + /** Stable identity — assigned on creation, never changes */ + readonly id: number; + + /** What kind of JSON value this node represents */ + readonly type: JsonNodeType; + + /** Parsing state */ + status: 'pending' | 'streaming' | 'complete'; + + /** Parent node (null for root) */ + parent: JsonNode | null; + + /** Key in parent — string for object properties, number for array indices */ + key: string | number | null; +} + +interface JsonObjectNode extends JsonNode { + type: 'object'; + children: Map<string, JsonNode>; + pendingKey: string | null; +} + +interface JsonArrayNode extends JsonNode { + type: 'array'; + children: JsonNode[]; +} + +interface JsonStringNode extends JsonNode { + type: 'string'; + /** Grows character-by-character as tokens arrive */ + value: string; +} + +interface JsonNumberNode extends JsonNode { + type: 'number'; + raw: string; + value: number | null; // Parsed when node completes +} + +interface JsonBooleanNode extends JsonNode { + type: 'boolean'; + value: boolean; +} + +interface JsonNullNode extends JsonNode { + type: 'null'; +} +``` + +### Parser API + +```ts +interface ParseEvent { + type: 'node-created' | 'value-updated' | 'node-completed'; + node: JsonNode; + /** For value-updated on strings: the characters appended this push */ + delta?: string; +} + +interface PartialJsonParser { + /** Feed characters. 
Returns events for what changed. */ + push(chunk: string): ParseEvent[]; + + /** Root node of the parse tree */ + readonly root: JsonNode | null; + + /** Look up a node by JSON Pointer path */ + getByPath(path: string): JsonNode | null; + + /** Subscribe to changes at a specific path */ + observe(path: string, callback: (event: ParseEvent) => void): () => void; + + /** Materialize tree (or subtree) to a plain JS value */ + toJSON(): unknown; + toJSON(node: JsonNode): unknown; +} +``` + +### Parsing State Machine + +The parser maintains a stack of open container nodes and a state enum: + +``` +EXPECT_VALUE → IN_STRING → (back to parent state) + → IN_NUMBER → (back to parent state) + → IN_KEYWORD (true/false/null) → (back to parent state) + → OPEN_OBJECT → EXPECT_KEY → IN_KEY_STRING → EXPECT_COLON → EXPECT_VALUE → ... + → OPEN_ARRAY → EXPECT_VALUE → ... +``` + +Each character is processed once. String values grow via append — the `JsonStringNode.value` extends in-place and the parser emits `value-updated` with the delta. + +### Materialization with Structural Sharing + +`toJSON()` walks the tree and produces plain JS objects. When called repeatedly (as tokens stream), it uses structural sharing: + +- Each node caches its last materialized value +- On `toJSON()`, if a node's status hasn't changed and no descendants have changed, return the cached value (same reference) +- If a descendant changed, shallow-clone ancestors up to root, reuse unchanged siblings +- This is equivalent to `immutableSetByPath()` but driven by the tree's own change tracking + +### Library Boundary + +- Package: `@cacheplane/partial-json` (or similar — standalone npm package) +- Zero dependencies, pure TypeScript +- No framework coupling — Angular/React/etc. integration is external +- Targets: ESM, CJS, types + +--- + +## Part 3: Content Classifier + +### Purpose + +A stateful, per-message service that: +1. Detects content type from the token stream +2. 
Routes content to the appropriate parser +3. Exposes classified results as Angular signals + +### Detection Rules (Applied in Order) + +| Priority | Trigger | Content Type | Parser Used | +|----------|---------|-------------|-------------| +| 1 | First non-whitespace is `{` | `json-render` | PartialJsonParser | +| 2 | ` ```spec ` fence detected | `mixed` (or `json-render` if no preceding prose) | SpecStreamCompiler | +| 3 | Line starts with `{"op":` | `json-render` | SpecStreamCompiler | +| 4 | `---a2ui_JSON---` delimiter | `a2ui` (or `mixed` if prose precedes) | A2UI accumulator | +| 5 | Any other text | `markdown` | String accumulator | + +### State Transitions + +``` +undetermined ──── { ──────────────────→ json-render (partial JSON path) + │ + ├──── ```spec ───────────────────→ mixed or json-render (SpecStream path) + │ + ├──── {"op": ───────────────────→ json-render (SpecStream path) + │ + ├──── ---a2ui_JSON--- ──────────→ a2ui + │ + └──── any other text ───────────→ markdown + +markdown ──── ```spec ───────────────→ mixed (prose preserved, patches start) +markdown ──── ---a2ui_JSON--- ───────→ mixed (prose preserved, A2UI starts) +``` + +Type can upgrade (`markdown` → `mixed`) but never downgrade. + +### Interface + +```ts +interface ContentClassifier { + /** Feed the full message content snapshot. Internally computes delta. 
*/ + update(content: string): void; + + /** Reactive signals for classified output */ + readonly type: Signal<'undetermined' | 'markdown' | 'json-render' | 'a2ui' | 'mixed'>; + readonly markdown: Signal<string>; + readonly spec: Signal<Spec | null>; + readonly elementStates: Signal<Map<string, ElementAccumulationState>>; + readonly streaming: Signal<boolean>; + + dispose(): void; +} + +interface ContentClassifierFactory { + create(): ContentClassifier; +} +``` + +### Internal Components + +Each classifier instance holds: + +- `processedLength: number` — tracks how much content has been consumed +- `DetectionState` — which content type we've committed to +- `PartialJsonParser` — instantiated when JSON object detected +- `ParseTreeStore` — bridges parse tree to Spec signal with structural sharing +- `SpecStreamCompiler` (from `@json-render/core`) — for JSONL patch path +- `MixedStreamParser` (from `@json-render/core`) — splits prose from patches +- `markdownAccumulator: string` — accumulated prose +- Angular signals for all public outputs + +### Delta Processing + +```ts +update(content: string): void { + const delta = content.slice(this.processedLength); + if (!delta.length) return; + this.processedLength = content.length; + + if (this.detectionState === 'undetermined') { + this.detect(delta); + } + + switch (this.detectionState) { + case 'markdown': + this.markdownAccumulator += delta; + this.checkForStructuredTransition(delta); + break; + case 'json-render-partial': + this.partialJsonParser.push(delta); + // ParseTreeStore handles materialization → spec signal + break; + case 'json-render-specstream': + case 'mixed': + this.mixedStreamParser.push(delta); + // onText → markdownAccumulator + // onPatch → specStreamCompiler → spec signal + break; + case 'a2ui': + this.a2uiAccumulator += delta; + break; + } + + this.updateSignals(); +} +``` + +--- + +## Part 4: ParseTreeStore — Bridging Parse Tree to Render Lib + +### Purpose + +Adapts the PartialJsonParser's event 
stream into a `Spec` signal with structural sharing, plus per-element accumulation tracking. This is the glue between the parser library (framework-agnostic) and the Angular render lib. + +### Interface + +```ts +function createParseTreeStore(parser: PartialJsonParser): ParseTreeStore; + +interface ParseTreeStore { + /** Push characters to the parser and process events */ + push(chunk: string): void; + + /** Current materialized spec (structurally shared between updates) */ + readonly spec: Signal<Spec | null>; + + /** Per-element accumulation tracking */ + readonly elementStates: Signal<Map<string, ElementAccumulationState>>; +} +``` + +### ElementAccumulationState + +```ts +interface ElementAccumulationState { + hasType: boolean; // /elements/{key}/type received + hasProps: boolean; // /elements/{key}/props received (at least partially) + hasChildren: boolean; // /elements/{key}/children received + streaming: boolean; // still receiving events targeting this element +} +``` + +### Materialization Strategy + +When `push(chunk)` is called: + +1. Parser processes characters, emits `ParseEvent[]` +2. Collect all affected paths from events +3. Batch-materialize: for each unique root-level change, walk up from changed node to root with shallow clones (structural sharing) +4. Update `spec` signal with the new structurally-shared Spec +5. Update `elementStates` signal from event paths + +**Example — a single token extends a prop value:** + +``` +Event: value-updated at /elements/el-1/props/title (delta: "lo") + +Materialization chain (bottom-up shallow clones): + /elements/el-1/props/title → "Hello" (new string) + /elements/el-1/props → { ...prev, title: "Hello" } (shallow clone) + /elements/el-1 → { ...prev, props: newProps } (shallow clone) + /elements → { ...prev, "el-1": newEl } (shallow clone) + spec → { ...prev, elements: newEls } (shallow clone) + +Everything else (el-2, el-3, state, root) → previous references unchanged. 
+``` + +### Location + +Lives in the `chat` library (`libs/chat`) since it bridges the framework-agnostic parser to Angular signals. Could be promoted to the `render` library if other consumers need it. + +--- + +## Part 5: Chat Component Integration + +### Template Changes + +The AI message template switches from markdown-only to classified rendering: + +```html +<ng-template chatMessageTemplate="ai" let-message let-index="index"> + @let classified = classifyMessage(message, index); + + <!-- Prose portion --> + @if (classified.markdown()) { + <div class="ai-prose" [innerHTML]="renderMd(classified.markdown())"></div> + } + + <!-- JSON-Render spec --> + @if (classified.spec(); as spec) { + <chat-generative-ui + [spec]="spec" + [registry]="chatConfig.renderRegistry" + [loading]="ref().isLoading()" + /> + } + + <!-- A2UI (future) --> + @if (classified.type() === 'a2ui') { + <!-- A2UI renderer placeholder --> + } +</ng-template> +``` + +### Classifier Lifecycle + +```ts +private classifiers = new Map<number, ContentClassifier>(); +private classifierFactory = inject(ContentClassifierFactory); + +classifyMessage(message: BaseMessage, index: number): ContentClassifier { + let classifier = this.classifiers.get(index); + if (!classifier) { + classifier = this.classifierFactory.create(); + this.classifiers.set(index, classifier); + } + classifier.update(messageContent(message)); + return classifier; +} +``` + +Classifiers are cached per message index. When messages are cleared (thread switch, reset), all classifiers are disposed and the cache is cleared. + +### ChatConfig Changes + +The existing `renderRegistry` field on `ChatConfig` is wired to `<chat-generative-ui>`. No new config fields needed for the initial implementation. 
+ +```ts +interface ChatConfig { + renderRegistry?: AngularRegistry; // Already exists — now used + avatarLabel?: string; + assistantName?: string; +} +``` + +### Pure Markdown Fast Path + +When `classified.type()` is `'markdown'` and `classified.spec()` is null, the template renders only the prose div — identical to today's behavior. No parser instantiated, no spec materialization, no overhead. + +--- + +## Part 6: A2UI Support (Future — Detection Only) + +A2UI detection is included in the content classifier now, but rendering is deferred. The classifier will: + +1. Detect the `---a2ui_JSON---` delimiter +2. Split prose from the A2UI JSON payload +3. Accumulate the payload into an `a2ui` signal +4. Expose `type() === 'a2ui'` or `type() === 'mixed'` + +A2UI rendering requires: +- A2UI catalog/schema support (mapping A2UI component names to Angular components) +- A2UI message type handling (`createSurface`, `updateComponents`, `updateDataModel`, `deleteSurface`) +- A separate design spec when we're ready to implement + +--- + +## Deliverables + +| # | Deliverable | Package | Description | +|---|------------|---------|-------------| +| 1 | Partial JSON Parser | `@cacheplane/partial-json` (new Nx lib at `libs/partial-json`) | Tree-based streaming JSON parser with events and materialization | +| 2 | Render lib memoization | `@cacheplane/render` | Element-level reference equality on `computed()` | +| 3 | ParseTreeStore | `@cacheplane/chat` | Bridges parse tree events to Spec signals with structural sharing | +| 4 | ContentClassifier | `@cacheplane/chat` | Per-message content detection and routing | +| 5 | Chat component integration | `@cacheplane/chat` | Template + lifecycle changes for generative UI rendering | + +## Spike Verification (Pre-Implementation) + +Before full implementation, an end-to-end spike will validate the critical path: + +**Spike scope:** Hardcoded token stream → PartialJsonParser → Spec materialization with structural sharing → `<render-spec>` 
rendering components that update as tokens stream. + +**What it proves:** +- The parse tree correctly builds from character-by-character input +- Materialization produces valid `Spec` objects that `<render-spec>` can render +- Structural sharing works — unchanged elements keep references, render lib skips re-render +- Character-level prop streaming is visible in the rendered UI + +**What it defers:** +- Content detection logic +- Integration with real LangGraph message streams +- A2UI support +- Production error handling and edge cases From 98134d2094f9a8593c61b707551a2fc45e343721 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 12:47:49 -0700 Subject: [PATCH 04/13] docs: add streaming generative UI implementation plan --- .../2026-04-08-streaming-generative-ui.md | 2087 +++++++++++++++++ 1 file changed, 2087 insertions(+) create mode 100644 docs/superpowers/plans/2026-04-08-streaming-generative-ui.md diff --git a/docs/superpowers/plans/2026-04-08-streaming-generative-ui.md b/docs/superpowers/plans/2026-04-08-streaming-generative-ui.md new file mode 100644 index 000000000..4ca81ba67 --- /dev/null +++ b/docs/superpowers/plans/2026-04-08-streaming-generative-ui.md @@ -0,0 +1,2087 @@ +# Streaming Generative UI — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Auto-detect and render generative UI content (json-render Spec, markdown) from AI message streams with character-level streaming and element-level memoization. + +**Architecture:** A new standalone `@cacheplane/partial-json` library provides tree-based streaming JSON parsing. The render lib gets element-level `computed()` equality for memoization. 
The chat lib gains a `ContentClassifier` that detects content type and routes to the partial JSON parser or markdown accumulator, plus a `ParseTreeStore` that bridges parse tree events to structurally-shared `Spec` signals. The `ChatComponent` template switches from markdown-only to classified rendering. + +**Tech Stack:** Angular 20+ signals, Vitest, TypeScript, `@json-render/core` (Spec types), Nx monorepo + +--- + +## File Structure + +### New Library: `libs/partial-json/` + +| File | Responsibility | +|------|---------------| +| `src/index.ts` | Public API barrel | +| `src/lib/types.ts` | Node types, ParseEvent, parser interface | +| `src/lib/parser.ts` | Character-by-character state machine parser | +| `src/lib/parser.spec.ts` | Parser unit tests | +| `src/lib/materialize.ts` | Tree → plain JS with structural sharing | +| `src/lib/materialize.spec.ts` | Materialization + structural sharing tests | +| `project.json` | Nx project config | +| `package.json` | NPM metadata | +| `tsconfig.json` | TS config | +| `tsconfig.lib.json` | Lib build config | +| `vite.config.mts` | Vitest config | + +### Modified: `libs/render/` + +| File | Change | +|------|--------| +| `src/lib/render-element.component.ts` | Add `{ equal: Object.is }` to `element()` computed | +| `src/lib/render-element.component.spec.ts` | Add memoization test | + +### New/Modified: `libs/chat/` + +| File | Responsibility | +|------|---------------| +| `src/lib/streaming/content-classifier.ts` | Content type detection + routing (NEW) | +| `src/lib/streaming/content-classifier.spec.ts` | Classifier tests (NEW) | +| `src/lib/streaming/parse-tree-store.ts` | Bridges parser → Spec signal (NEW) | +| `src/lib/streaming/parse-tree-store.spec.ts` | Store tests (NEW) | +| `src/lib/compositions/chat/chat.component.ts` | Template + classifier lifecycle (MODIFY) | +| `src/lib/compositions/chat/chat.component.spec.ts` | Integration tests (MODIFY) | +| `src/public-api.ts` | Export new types (MODIFY) | + +--- + +### 
Task 1: Scaffold `@cacheplane/partial-json` Library + +**Files:** +- Create: `libs/partial-json/project.json` +- Create: `libs/partial-json/package.json` +- Create: `libs/partial-json/tsconfig.json` +- Create: `libs/partial-json/tsconfig.lib.json` +- Create: `libs/partial-json/vite.config.mts` +- Create: `libs/partial-json/src/index.ts` +- Modify: `tsconfig.base.json` + +- [ ] **Step 1: Create project.json** + +```json +{ + "name": "partial-json", + "$schema": "../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "libs/partial-json/src", + "projectType": "library", + "tags": ["scope:shared", "type:lib"], + "targets": { + "build": { + "executor": "@nx/js:tsc", + "outputs": ["{workspaceRoot}/dist/libs/partial-json"], + "options": { + "outputPath": "dist/libs/partial-json", + "main": "libs/partial-json/src/index.ts", + "tsConfig": "libs/partial-json/tsconfig.lib.json" + } + }, + "lint": { + "executor": "@nx/eslint:lint" + }, + "test": { + "executor": "@nx/vite:test", + "options": { + "configFile": "libs/partial-json/vite.config.mts" + } + } + } +} +``` + +- [ ] **Step 2: Create package.json** + +```json +{ + "name": "@cacheplane/partial-json", + "version": "0.0.1", + "license": "PolyForm-Noncommercial-1.0.0", + "sideEffects": false +} +``` + +- [ ] **Step 3: Create tsconfig.json** + +```json +{ + "extends": "../../tsconfig.base.json", + "files": [], + "references": [ + { "path": "./tsconfig.lib.json" } + ] +} +``` + +- [ ] **Step 4: Create tsconfig.lib.json** + +```json +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "declaration": true + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.spec.ts"] +} +``` + +- [ ] **Step 5: Create vite.config.mts** + +```ts +import { defineConfig } from 'vite'; +import { nxViteTsPaths } from '@nx/vite/plugins/nx-tsconfig-paths.plugin'; + +export default defineConfig({ + plugins: [nxViteTsPaths()], + test: { + environment: 'node', + globals: true, + include: 
['src/**/*.spec.ts'],
+  },
+});
+```
+
+- [ ] **Step 6: Create src/index.ts (empty barrel)**
+
+```ts
+// Public API — populated as modules are added
+```
+
+- [ ] **Step 7: Add path mapping to tsconfig.base.json**
+
+Add to `compilerOptions.paths`:
+
+```json
+"@cacheplane/partial-json": ["libs/partial-json/src/index.ts"]
+```
+
+- [ ] **Step 8: Verify the scaffold compiles**
+
+Run: `npx nx test partial-json`
+Expected: PASS (no test files yet — vitest's `passWithNoTests` defaults to false, so if the run exits non-zero on the empty suite, add `passWithNoTests: true` to the `test` options in `vite.config.mts`)
+
+- [ ] **Step 9: Commit**
+
+```bash
+git add libs/partial-json/ tsconfig.base.json
+git commit -m "chore: scaffold @cacheplane/partial-json library"
+```
+
+---
+
+### Task 2: Partial JSON Parser — Types
+
+**Files:**
+- Create: `libs/partial-json/src/lib/types.ts`
+- Modify: `libs/partial-json/src/index.ts`
+
+- [ ] **Step 1: Write the types file**
+
+```ts
+// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0
+
+/** Kinds of JSON values a node can represent. */
+export type JsonNodeType = 'object' | 'array' | 'string' | 'number' | 'boolean' | 'null';
+
+/** Parsing state of a node. */
+export type JsonNodeStatus = 'pending' | 'streaming' | 'complete';
+
+/** Base shape shared by all nodes. */
+export interface JsonNodeBase {
+  /** Stable identity — assigned on creation, never changes. */
+  readonly id: number;
+  /** What kind of JSON value this node represents. */
+  readonly type: JsonNodeType;
+  /** Parsing state. */
+  status: JsonNodeStatus;
+  /** Parent node (null for root). */
+  parent: JsonNode | null;
+  /** Key in parent — string for object properties, number for array indices. */
+  key: string | number | null;
+}
+
+export interface JsonObjectNode extends JsonNodeBase {
+  readonly type: 'object';
+  children: Map<string, JsonNode>;
+  /** Key currently being built (between quote open and colon).
*/ + pendingKey: string | null; +} + +export interface JsonArrayNode extends JsonNodeBase { + readonly type: 'array'; + children: JsonNode[]; +} + +export interface JsonStringNode extends JsonNodeBase { + readonly type: 'string'; + /** Grows character-by-character as tokens arrive. */ + value: string; +} + +export interface JsonNumberNode extends JsonNodeBase { + readonly type: 'number'; + /** Raw characters accumulated so far. */ + raw: string; + /** Parsed value — set when node completes. */ + value: number | null; +} + +export interface JsonBooleanNode extends JsonNodeBase { + readonly type: 'boolean'; + value: boolean; +} + +export interface JsonNullNode extends JsonNodeBase { + readonly type: 'null'; +} + +export type JsonNode = + | JsonObjectNode + | JsonArrayNode + | JsonStringNode + | JsonNumberNode + | JsonBooleanNode + | JsonNullNode; + +/** Events emitted by the parser as the tree changes. */ +export interface ParseEvent { + type: 'node-created' | 'value-updated' | 'node-completed'; + node: JsonNode; + /** For value-updated on strings: the characters appended this push. */ + delta?: string; +} + +/** Push-based streaming JSON parser. */ +export interface PartialJsonParser { + /** Feed characters. Returns events for what changed. */ + push(chunk: string): ParseEvent[]; + /** Root node of the parse tree. */ + readonly root: JsonNode | null; + /** Look up a node by JSON Pointer path (e.g., "/elements/el-1/props"). 
*/ + getByPath(path: string): JsonNode | null; +} +``` + +- [ ] **Step 2: Update barrel export** + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +export type { + JsonNodeType, JsonNodeStatus, JsonNodeBase, + JsonObjectNode, JsonArrayNode, JsonStringNode, + JsonNumberNode, JsonBooleanNode, JsonNullNode, + JsonNode, ParseEvent, PartialJsonParser, +} from './lib/types'; +``` + +- [ ] **Step 3: Verify types compile** + +Run: `npx nx test partial-json` +Expected: PASS + +- [ ] **Step 4: Commit** + +```bash +git add libs/partial-json/ +git commit -m "feat(partial-json): add node types and parser interface" +``` + +--- + +### Task 3: Partial JSON Parser — State Machine + +**Files:** +- Create: `libs/partial-json/src/lib/parser.ts` +- Create: `libs/partial-json/src/lib/parser.spec.ts` +- Modify: `libs/partial-json/src/index.ts` + +- [ ] **Step 1: Write failing tests for core parsing** + +Create `libs/partial-json/src/lib/parser.spec.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { createPartialJsonParser } from './parser'; + +describe('createPartialJsonParser', () => { + describe('strings', () => { + it('parses a complete string', () => { + const parser = createPartialJsonParser(); + parser.push('"hello"'); + expect(parser.root).not.toBeNull(); + expect(parser.root!.type).toBe('string'); + expect((parser.root as any).value).toBe('hello'); + expect(parser.root!.status).toBe('complete'); + }); + + it('streams a string character-by-character', () => { + const parser = createPartialJsonParser(); + parser.push('"he'); + expect(parser.root!.type).toBe('string'); + expect((parser.root as any).value).toBe('he'); + expect(parser.root!.status).toBe('streaming'); + + parser.push('llo"'); + expect((parser.root as any).value).toBe('hello'); + expect(parser.root!.status).toBe('complete'); + }); + + it('emits value-updated events with delta for strings', () => { + const parser = 
createPartialJsonParser(); + const events1 = parser.push('"he'); + // Should have node-created + value-updated + const created = events1.find(e => e.type === 'node-created'); + expect(created).toBeDefined(); + + const events2 = parser.push('llo"'); + const updated = events2.find(e => e.type === 'value-updated'); + expect(updated).toBeDefined(); + expect(updated!.delta).toBe('llo'); + }); + + it('handles escaped characters in strings', () => { + const parser = createPartialJsonParser(); + parser.push('"hello\\nworld"'); + expect((parser.root as any).value).toBe('hello\nworld'); + }); + + it('handles escaped quotes in strings', () => { + const parser = createPartialJsonParser(); + parser.push('"say \\"hi\\""'); + expect((parser.root as any).value).toBe('say "hi"'); + }); + + it('handles unicode escapes', () => { + const parser = createPartialJsonParser(); + parser.push('"\\u0041"'); + expect((parser.root as any).value).toBe('A'); + }); + }); + + describe('numbers', () => { + it('parses an integer', () => { + const parser = createPartialJsonParser(); + parser.push('42'); + expect(parser.root!.type).toBe('number'); + // Number is still streaming (no terminator seen) + expect((parser.root as any).raw).toBe('42'); + }); + + it('completes a number when followed by comma or brace', () => { + const parser = createPartialJsonParser(); + parser.push('[42]'); + const arr = parser.root as any; + expect(arr.type).toBe('array'); + expect(arr.children[0].type).toBe('number'); + expect(arr.children[0].value).toBe(42); + expect(arr.children[0].status).toBe('complete'); + }); + + it('parses negative and decimal numbers', () => { + const parser = createPartialJsonParser(); + parser.push('[-3.14]'); + const arr = parser.root as any; + expect(arr.children[0].value).toBe(-3.14); + }); + }); + + describe('booleans and null', () => { + it('parses true', () => { + const parser = createPartialJsonParser(); + parser.push('true'); + expect(parser.root!.type).toBe('boolean'); + 
expect((parser.root as any).value).toBe(true); + }); + + it('parses false', () => { + const parser = createPartialJsonParser(); + parser.push('false'); + expect(parser.root!.type).toBe('boolean'); + expect((parser.root as any).value).toBe(false); + }); + + it('parses null', () => { + const parser = createPartialJsonParser(); + parser.push('null'); + expect(parser.root!.type).toBe('null'); + }); + + it('handles partial keywords gracefully', () => { + const parser = createPartialJsonParser(); + parser.push('tru'); + // Should be pending/streaming, not errored + expect(parser.root).not.toBeNull(); + }); + }); + + describe('objects', () => { + it('parses a simple object', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"Alice"}'); + expect(parser.root!.type).toBe('object'); + const obj = parser.root as any; + expect(obj.children.get('name').type).toBe('string'); + expect(obj.children.get('name').value).toBe('Alice'); + expect(obj.status).toBe('complete'); + }); + + it('streams an object property value', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"Al'); + const obj = parser.root as any; + expect(obj.children.get('name').value).toBe('Al'); + expect(obj.children.get('name').status).toBe('streaming'); + + parser.push('ice"}'); + expect(obj.children.get('name').value).toBe('Alice'); + expect(obj.children.get('name').status).toBe('complete'); + }); + + it('parses multiple properties', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1,"b":2}'); + const obj = parser.root as any; + expect(obj.children.get('a').value).toBe(1); + expect(obj.children.get('b').value).toBe(2); + }); + + it('parses nested objects', () => { + const parser = createPartialJsonParser(); + parser.push('{"outer":{"inner":"val"}}'); + const obj = parser.root as any; + const inner = obj.children.get('outer'); + expect(inner.type).toBe('object'); + expect(inner.children.get('inner').value).toBe('val'); + }); + }); + + 
describe('arrays', () => { + it('parses a simple array', () => { + const parser = createPartialJsonParser(); + parser.push('[1,2,3]'); + const arr = parser.root as any; + expect(arr.type).toBe('array'); + expect(arr.children.length).toBe(3); + expect(arr.children[0].value).toBe(1); + expect(arr.children[2].value).toBe(3); + }); + + it('parses array of strings', () => { + const parser = createPartialJsonParser(); + parser.push('["a","b"]'); + const arr = parser.root as any; + expect(arr.children[0].value).toBe('a'); + expect(arr.children[1].value).toBe('b'); + }); + + it('parses nested arrays', () => { + const parser = createPartialJsonParser(); + parser.push('[[1,2],[3]]'); + const arr = parser.root as any; + expect(arr.children.length).toBe(2); + expect(arr.children[0].children[1].value).toBe(2); + }); + }); + + describe('streaming complex structures', () => { + it('builds a Spec-like structure token-by-token', () => { + const parser = createPartialJsonParser(); + const json = '{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hello"}}}}'; + + // Feed character by character + for (const ch of json) { + parser.push(ch); + } + + const obj = parser.root as any; + expect(obj.type).toBe('object'); + expect(obj.children.get('root').value).toBe('r1'); + const elements = obj.children.get('elements'); + expect(elements.type).toBe('object'); + const r1 = elements.children.get('r1'); + expect(r1.children.get('type').value).toBe('Text'); + expect(r1.children.get('props').children.get('label').value).toBe('Hello'); + }); + + it('maintains stable node identities across pushes', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"'); + const rootId = parser.root!.id; + const nameNode = (parser.root as any).children.get('name'); + const nameId = nameNode.id; + + parser.push('Alice"}'); + // Same root, same name node + expect(parser.root!.id).toBe(rootId); + expect((parser.root as any).children.get('name').id).toBe(nameId); + }); + }); + + 
describe('getByPath', () => { + it('returns root for empty path', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1}'); + expect(parser.getByPath('')).toBe(parser.root); + }); + + it('navigates object properties', () => { + const parser = createPartialJsonParser(); + parser.push('{"elements":{"r1":{"type":"Text"}}}'); + const node = parser.getByPath('/elements/r1/type'); + expect(node).not.toBeNull(); + expect(node!.type).toBe('string'); + expect((node as any).value).toBe('Text'); + }); + + it('navigates array indices', () => { + const parser = createPartialJsonParser(); + parser.push('{"items":["a","b","c"]}'); + const node = parser.getByPath('/items/1'); + expect(node).not.toBeNull(); + expect((node as any).value).toBe('b'); + }); + + it('returns null for non-existent paths', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1}'); + expect(parser.getByPath('/b')).toBeNull(); + }); + }); + + describe('whitespace handling', () => { + it('handles whitespace between tokens', () => { + const parser = createPartialJsonParser(); + parser.push('{ "a" : 1 , "b" : 2 }'); + const obj = parser.root as any; + expect(obj.children.get('a').value).toBe(1); + expect(obj.children.get('b').value).toBe(2); + }); + + it('skips leading whitespace', () => { + const parser = createPartialJsonParser(); + parser.push(' \n\t"hello"'); + expect(parser.root!.type).toBe('string'); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `npx nx test partial-json` +Expected: FAIL — `createPartialJsonParser` not found + +- [ ] **Step 3: Implement the parser** + +Create `libs/partial-json/src/lib/parser.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import type { + JsonNode, JsonObjectNode, JsonArrayNode, JsonStringNode, + JsonNumberNode, JsonBooleanNode, JsonNullNode, + ParseEvent, PartialJsonParser, +} from './types'; + +const enum State { + EXPECT_VALUE, + IN_STRING, + IN_STRING_ESCAPE, + 
IN_STRING_UNICODE, + IN_NUMBER, + IN_KEYWORD, + EXPECT_KEY, + IN_KEY_STRING, + IN_KEY_STRING_ESCAPE, + IN_KEY_STRING_UNICODE, + EXPECT_COLON, + AFTER_VALUE, +} + +const WHITESPACE = new Set([' ', '\t', '\n', '\r']); +const KEYWORDS: Record<string, { type: 'boolean' | 'null'; value: boolean | null }> = { + true: { type: 'boolean', value: true }, + false: { type: 'boolean', value: false }, + null: { type: 'null', value: null }, +}; + +export function createPartialJsonParser(): PartialJsonParser { + let nextId = 0; + let root: JsonNode | null = null; + let state = State.EXPECT_VALUE; + /** Stack of open container/value nodes. Current node is last. */ + const stack: JsonNode[] = []; + /** Buffer for keyword matching (true/false/null). */ + let keywordBuffer = ''; + /** Buffer for object key being read. */ + let keyBuffer = ''; + /** Unicode escape accumulator. */ + let unicodeBuffer = ''; + let unicodeCount = 0; + + function current(): JsonNode | undefined { + return stack[stack.length - 1]; + } + + function createNode<T extends JsonNode>(partial: Omit<T, 'id' | 'parent' | 'key' | 'status'>): T { + return { + ...partial, + id: nextId++, + parent: null, + key: null, + status: 'pending', + } as T; + } + + function attachToParent(node: JsonNode): void { + const parent = current(); + if (!parent) { + root = node; + return; + } + node.parent = parent; + if (parent.type === 'object') { + const objParent = parent as JsonObjectNode; + const key = objParent.pendingKey!; + node.key = key; + objParent.children.set(key, node); + objParent.pendingKey = null; + } else if (parent.type === 'array') { + const arrParent = parent as JsonArrayNode; + node.key = arrParent.children.length; + arrParent.children.push(node); + } + } + + function completeNumber(events: ParseEvent[]): void { + const node = current() as JsonNumberNode; + if (node && node.type === 'number') { + node.value = Number(node.raw); + node.status = 'complete'; + events.push({ type: 'node-completed', node }); + 
stack.pop(); + } + } + + function afterValue(): void { + const parent = current(); + if (!parent) { + state = State.AFTER_VALUE; + return; + } + if (parent.type === 'object') { + state = State.AFTER_VALUE; + } else if (parent.type === 'array') { + state = State.AFTER_VALUE; + } else { + state = State.AFTER_VALUE; + } + } + + function processEscape(ch: string, node: JsonStringNode, events: ParseEvent[]): string { + switch (ch) { + case '"': return '"'; + case '\\': return '\\'; + case '/': return '/'; + case 'b': return '\b'; + case 'f': return '\f'; + case 'n': return '\n'; + case 'r': return '\r'; + case 't': return '\t'; + case 'u': + unicodeBuffer = ''; + unicodeCount = 0; + return ''; // Will be handled in unicode state + default: return ch; + } + } + + function push(chunk: string): ParseEvent[] { + const events: ParseEvent[] = []; + + for (let i = 0; i < chunk.length; i++) { + const ch = chunk[i]; + + switch (state) { + case State.EXPECT_VALUE: { + if (WHITESPACE.has(ch)) continue; + if (ch === '"') { + const node = createNode<JsonStringNode>({ type: 'string', value: '' }); + node.status = 'streaming'; + attachToParent(node); + stack.push(node); + events.push({ type: 'node-created', node }); + state = State.IN_STRING; + } else if (ch === '{') { + const node = createNode<JsonObjectNode>({ type: 'object', children: new Map(), pendingKey: null }); + node.status = 'streaming'; + attachToParent(node); + stack.push(node); + events.push({ type: 'node-created', node }); + state = State.EXPECT_KEY; + } else if (ch === '[') { + const node = createNode<JsonArrayNode>({ type: 'array', children: [] }); + node.status = 'streaming'; + attachToParent(node); + stack.push(node); + events.push({ type: 'node-created', node }); + state = State.EXPECT_VALUE; + } else if (ch === ']') { + // Empty array close + const arr = current(); + if (arr && arr.type === 'array') { + arr.status = 'complete'; + events.push({ type: 'node-completed', node: arr }); + stack.pop(); + afterValue(); + } 
+ } else if (ch === '-' || (ch >= '0' && ch <= '9')) { + const node = createNode<JsonNumberNode>({ type: 'number', raw: ch, value: null }); + node.status = 'streaming'; + attachToParent(node); + stack.push(node); + events.push({ type: 'node-created', node }); + state = State.IN_NUMBER; + } else if (ch === 't' || ch === 'f' || ch === 'n') { + keywordBuffer = ch; + state = State.IN_KEYWORD; + } + break; + } + + case State.IN_STRING: { + const node = current() as JsonStringNode; + if (ch === '\\') { + state = State.IN_STRING_ESCAPE; + } else if (ch === '"') { + node.status = 'complete'; + events.push({ type: 'node-completed', node }); + stack.pop(); + afterValue(); + } else { + node.value += ch; + events.push({ type: 'value-updated', node, delta: ch }); + } + break; + } + + case State.IN_STRING_ESCAPE: { + const node = current() as JsonStringNode; + if (ch === 'u') { + unicodeBuffer = ''; + unicodeCount = 0; + state = State.IN_STRING_UNICODE; + } else { + const resolved = processEscape(ch, node, events); + node.value += resolved; + events.push({ type: 'value-updated', node, delta: resolved }); + state = State.IN_STRING; + } + break; + } + + case State.IN_STRING_UNICODE: { + const node = current() as JsonStringNode; + unicodeBuffer += ch; + unicodeCount++; + if (unicodeCount === 4) { + const codePoint = parseInt(unicodeBuffer, 16); + const char = String.fromCharCode(codePoint); + node.value += char; + events.push({ type: 'value-updated', node, delta: char }); + state = State.IN_STRING; + } + break; + } + + case State.IN_NUMBER: { + const node = current() as JsonNumberNode; + if ((ch >= '0' && ch <= '9') || ch === '.' 
|| ch === 'e' || ch === 'E' || ch === '+' || ch === '-') { + node.raw += ch; + } else { + // Number ended — process the terminator character + completeNumber(events); + i--; // Re-process this character in the parent state + afterValue(); + } + break; + } + + case State.IN_KEYWORD: { + keywordBuffer += ch; + // Check if we've matched a complete keyword + for (const [keyword, info] of Object.entries(KEYWORDS)) { + if (keyword === keywordBuffer) { + if (info.type === 'boolean') { + const node = createNode<JsonBooleanNode>({ type: 'boolean', value: info.value as boolean }); + node.status = 'complete'; + attachToParent(node); + events.push({ type: 'node-created', node }); + events.push({ type: 'node-completed', node }); + } else { + const node = createNode<JsonNullNode>({ type: 'null' }); + node.status = 'complete'; + attachToParent(node); + events.push({ type: 'node-created', node }); + events.push({ type: 'node-completed', node }); + } + keywordBuffer = ''; + afterValue(); + break; + } + } + // If still a prefix of some keyword, keep accumulating + break; + } + + case State.EXPECT_KEY: { + if (WHITESPACE.has(ch)) continue; + if (ch === '"') { + keyBuffer = ''; + state = State.IN_KEY_STRING; + } else if (ch === '}') { + const obj = current(); + if (obj && obj.type === 'object') { + obj.status = 'complete'; + events.push({ type: 'node-completed', node: obj }); + stack.pop(); + afterValue(); + } + } + break; + } + + case State.IN_KEY_STRING: { + if (ch === '\\') { + state = State.IN_KEY_STRING_ESCAPE; + } else if (ch === '"') { + (current() as JsonObjectNode).pendingKey = keyBuffer; + state = State.EXPECT_COLON; + } else { + keyBuffer += ch; + } + break; + } + + case State.IN_KEY_STRING_ESCAPE: { + if (ch === 'u') { + unicodeBuffer = ''; + unicodeCount = 0; + state = State.IN_KEY_STRING_UNICODE; + } else { + // For key strings, resolve escape the same way + switch (ch) { + case '"': keyBuffer += '"'; break; + case '\\': keyBuffer += '\\'; break; + case '/': keyBuffer += 
'/'; break; + case 'n': keyBuffer += '\n'; break; + case 'r': keyBuffer += '\r'; break; + case 't': keyBuffer += '\t'; break; + default: keyBuffer += ch; + } + state = State.IN_KEY_STRING; + } + break; + } + + case State.IN_KEY_STRING_UNICODE: { + unicodeBuffer += ch; + unicodeCount++; + if (unicodeCount === 4) { + const codePoint = parseInt(unicodeBuffer, 16); + keyBuffer += String.fromCharCode(codePoint); + state = State.IN_KEY_STRING; + } + break; + } + + case State.EXPECT_COLON: { + if (WHITESPACE.has(ch)) continue; + if (ch === ':') { + state = State.EXPECT_VALUE; + } + break; + } + + case State.AFTER_VALUE: { + if (WHITESPACE.has(ch)) continue; + const parent = current(); + if (ch === ',') { + if (parent && parent.type === 'object') { + state = State.EXPECT_KEY; + } else if (parent && parent.type === 'array') { + state = State.EXPECT_VALUE; + } + } else if (ch === '}') { + if (parent && parent.type === 'object') { + parent.status = 'complete'; + events.push({ type: 'node-completed', node: parent }); + stack.pop(); + afterValue(); + } + } else if (ch === ']') { + if (parent && parent.type === 'array') { + parent.status = 'complete'; + events.push({ type: 'node-completed', node: parent }); + stack.pop(); + afterValue(); + } + } + break; + } + } + } + + return events; + } + + function getByPath(path: string): JsonNode | null { + if (!root) return null; + if (path === '' || path === '/') return root; + + const segments = path.split('/').filter(Boolean); + let node: JsonNode = root; + + for (const segment of segments) { + if (node.type === 'object') { + const child = (node as JsonObjectNode).children.get(segment); + if (!child) return null; + node = child; + } else if (node.type === 'array') { + const index = parseInt(segment, 10); + const child = (node as JsonArrayNode).children[index]; + if (!child) return null; + node = child; + } else { + return null; + } + } + + return node; + } + + return { + push, + get root() { return root; }, + getByPath, + }; +} +``` + 
+- [ ] **Step 4: Update barrel export** + +Add to `libs/partial-json/src/index.ts`: + +```ts +export { createPartialJsonParser } from './lib/parser'; +``` + +- [ ] **Step 5: Run tests to verify they pass** + +Run: `npx nx test partial-json` +Expected: ALL PASS + +- [ ] **Step 6: Commit** + +```bash +git add libs/partial-json/ +git commit -m "feat(partial-json): implement character-by-character streaming parser" +``` + +--- + +### Task 4: Partial JSON Parser — Materialization with Structural Sharing + +**Files:** +- Create: `libs/partial-json/src/lib/materialize.ts` +- Create: `libs/partial-json/src/lib/materialize.spec.ts` +- Modify: `libs/partial-json/src/index.ts` + +- [ ] **Step 1: Write failing tests** + +Create `libs/partial-json/src/lib/materialize.spec.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { createPartialJsonParser } from './parser'; +import { materialize } from './materialize'; + +describe('materialize', () => { + it('materializes a string node', () => { + const parser = createPartialJsonParser(); + parser.push('"hello"'); + expect(materialize(parser.root!)).toBe('hello'); + }); + + it('materializes a number node', () => { + const parser = createPartialJsonParser(); + parser.push('[42]'); + const arr = materialize(parser.root!) 
as number[]; + expect(arr[0]).toBe(42); + }); + + it('materializes a boolean', () => { + const parser = createPartialJsonParser(); + parser.push('true'); + expect(materialize(parser.root!)).toBe(true); + }); + + it('materializes null', () => { + const parser = createPartialJsonParser(); + parser.push('null'); + expect(materialize(parser.root!)).toBeNull(); + }); + + it('materializes a simple object', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"Alice","age":30}'); + expect(materialize(parser.root!)).toEqual({ name: 'Alice', age: 30 }); + }); + + it('materializes an array', () => { + const parser = createPartialJsonParser(); + parser.push('["a","b","c"]'); + expect(materialize(parser.root!)).toEqual(['a', 'b', 'c']); + }); + + it('materializes nested structures', () => { + const parser = createPartialJsonParser(); + parser.push('{"elements":{"r1":{"type":"Text","props":{"label":"Hello"}}}}'); + expect(materialize(parser.root!)).toEqual({ + elements: { r1: { type: 'Text', props: { label: 'Hello' } } }, + }); + }); + + it('materializes partial (streaming) strings', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"Al'); + const result = materialize(parser.root!) as any; + expect(result.name).toBe('Al'); + }); + + it('materializes partial numbers as null', () => { + const parser = createPartialJsonParser(); + parser.push('{"val":12'); + const result = materialize(parser.root!) as any; + // Number still streaming, raw="12" but value is null until complete + // Materialize should use the raw value parsed as number for usability + expect(result.val).toBe(12); + }); +}); + +describe('materialize — structural sharing', () => { + it('returns same reference for unchanged subtrees', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":{"x":1},"b":{"y":'); + + const result1 = materialize(parser.root!) 
as any; + const aRef1 = result1.a; + + // Continue streaming into b — a should be unchanged + parser.push('2}}'); + const result2 = materialize(parser.root!) as any; + + // a subtree is complete and unchanged — same reference + expect(result2.a).toBe(aRef1); + // Root must be different (b changed) + expect(result2).not.toBe(result1); + expect(result2.b).toEqual({ y: 2 }); + }); + + it('preserves sibling references when one property changes', () => { + const parser = createPartialJsonParser(); + parser.push('{"elements":{"el-1":{"type":"Text"},"el-2":{"type":"But'); + + const result1 = materialize(parser.root!) as any; + const el1Ref = result1.elements['el-1']; + + parser.push('ton"}}}'); + const result2 = materialize(parser.root!) as any; + + // el-1 unchanged — same reference + expect(result2.elements['el-1']).toBe(el1Ref); + // el-2 changed — different reference + expect(result2.elements['el-2']).toEqual({ type: 'Button' }); + }); + + it('returns same reference when nothing changed', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1}'); + + const result1 = materialize(parser.root!); + const result2 = materialize(parser.root!); + expect(result2).toBe(result1); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `npx nx test partial-json` +Expected: FAIL — `materialize` not found + +- [ ] **Step 3: Implement materialization** + +Create `libs/partial-json/src/lib/materialize.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import type { + JsonNode, JsonObjectNode, JsonArrayNode, + JsonStringNode, JsonNumberNode, JsonBooleanNode, +} from './types'; + +/** + * Per-node cache for materialized values. The cache is keyed by node identity + * (the node object itself) and stores { version, value } where version is a + * monotonically increasing counter bumped on any change to the node or descendants. 
+ */ +const cache = new WeakMap<JsonNode, { value: unknown; status: string; childVersion: number }>(); + +/** Global version counter — bumped each time materialize is called. */ +let globalVersion = 0; + +/** + * Recursively materializes a parse tree node into a plain JS value. + * Uses structural sharing: unchanged subtrees return the same reference. + */ +export function materialize(node: JsonNode): unknown { + globalVersion++; + return materializeNode(node); +} + +function materializeNode(node: JsonNode): unknown { + switch (node.type) { + case 'string': + return materializeString(node as JsonStringNode); + case 'number': + return materializeNumber(node as JsonNumberNode); + case 'boolean': + return (node as JsonBooleanNode).value; + case 'null': + return null; + case 'object': + return materializeObject(node as JsonObjectNode); + case 'array': + return materializeArray(node as JsonArrayNode); + } +} + +function materializeString(node: JsonStringNode): string { + return node.value; +} + +function materializeNumber(node: JsonNumberNode): number | null { + if (node.value !== null) return node.value; + // Still streaming — parse what we have + const parsed = Number(node.raw); + return isNaN(parsed) ? 
null : parsed; +} + +function materializeObject(node: JsonObjectNode): Record<string, unknown> { + // Build a version signature from children's statuses and values + const childVersion = computeObjectChildVersion(node); + const cached = cache.get(node); + + if (cached && cached.status === node.status && cached.childVersion === childVersion) { + return cached.value as Record<string, unknown>; + } + + const result: Record<string, unknown> = {}; + for (const [key, child] of node.children) { + result[key] = materializeNode(child); + } + + cache.set(node, { value: result, status: node.status, childVersion }); + return result; +} + +function materializeArray(node: JsonArrayNode): unknown[] { + const childVersion = computeArrayChildVersion(node); + const cached = cache.get(node); + + if (cached && cached.status === node.status && cached.childVersion === childVersion) { + return cached.value as unknown[]; + } + + const result = node.children.map(child => materializeNode(child)); + cache.set(node, { value: result, status: node.status, childVersion }); + return result; +} + +/** + * Computes a lightweight version hash for an object's children. + * Uses a combination of child count, statuses, and for leaf nodes, their values. 
+ */ +function computeObjectChildVersion(node: JsonObjectNode): number { + let hash = node.children.size; + for (const [, child] of node.children) { + hash = (hash * 31 + computeNodeVersion(child)) | 0; + } + return hash; +} + +function computeArrayChildVersion(node: JsonArrayNode): number { + let hash = node.children.length; + for (const child of node.children) { + hash = (hash * 31 + computeNodeVersion(child)) | 0; + } + return hash; +} + +function computeNodeVersion(node: JsonNode): number { + switch (node.type) { + case 'string': { + const s = (node as JsonStringNode).value; + // Use length + last few chars as a fast hash + return (s.length * 31 + (s.charCodeAt(s.length - 1) || 0)) | 0; + } + case 'number': { + const n = node as JsonNumberNode; + return n.value !== null ? (n.value * 1000) | 0 : n.raw.length; + } + case 'boolean': + return (node as JsonBooleanNode).value ? 1 : 0; + case 'null': + return 0; + case 'object': + return computeObjectChildVersion(node as JsonObjectNode); + case 'array': + return computeArrayChildVersion(node as JsonArrayNode); + } +} +``` + +- [ ] **Step 4: Update barrel export** + +Add to `libs/partial-json/src/index.ts`: + +```ts +export { materialize } from './lib/materialize'; +``` + +- [ ] **Step 5: Run tests to verify they pass** + +Run: `npx nx test partial-json` +Expected: ALL PASS + +- [ ] **Step 6: Commit** + +```bash +git add libs/partial-json/ +git commit -m "feat(partial-json): add materialization with structural sharing" +``` + +--- + +### Task 5: Render Lib — Element-Level Memoization + +**Files:** +- Modify: `libs/render/src/lib/render-element.component.ts:68-72` +- Modify: `libs/render/src/lib/render-element.component.spec.ts` + +- [ ] **Step 1: Write failing test for memoization** + +Add to the end of `libs/render/src/lib/render-element.component.spec.ts`, inside a new `describe` block: + +```ts +describe('RenderElementComponent — element-level memoization', () => { + it('element() returns same reference when spec 
changes but element is unchanged', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const { signal, computed } = require('@angular/core'); + + // Simulate two spec snapshots with structural sharing: + // el-1 is the same reference in both, el-2 is different + const sharedEl1 = { type: 'Text', props: { label: 'Same' } }; + const spec1 = createSpec({ + root: { type: 'Container', props: {}, children: ['el-1', 'el-2'] }, + 'el-1': sharedEl1, + 'el-2': { type: 'Text', props: { label: 'Old' } }, + }); + + // spec2 reuses the same el-1 reference (structural sharing) + const spec2 = { + ...spec1, + elements: { + ...spec1.elements, + 'el-2': { type: 'Text', props: { label: 'New' } }, + }, + } as Spec; + // el-1 is the SAME object reference + expect(spec2.elements['el-1']).toBe(spec1.elements['el-1']); + + // Simulate what the component does: computed with Object.is equality + const specSignal = signal(spec1); + const elementKey = signal('el-1'); + const element = computed( + () => specSignal()?.elements?.[elementKey()], + { equal: Object.is }, + ); + + const ref1 = element(); + expect(ref1).toBe(sharedEl1); + + // Update spec — el-1 reference unchanged + specSignal.set(spec2); + const ref2 = element(); + // With Object.is equality, computed should return same reference + expect(ref2).toBe(ref1); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify baseline passes (this test should pass even before the change since we're testing the pattern)** + +Run: `npx nx test render` +Expected: PASS — the test validates the computed pattern itself + +- [ ] **Step 3: Add `equal: Object.is` to the element() computed** + +In `libs/render/src/lib/render-element.component.ts`, change line 68-72: + +```ts + /** The UIElement definition from the spec. 
*/ + readonly element: Signal<UIElement | undefined> = computed( + () => this.spec()?.elements?.[this.elementKey()], + { equal: Object.is }, + ); +``` + +- [ ] **Step 4: Run tests to verify all pass** + +Run: `npx nx test render` +Expected: ALL PASS + +- [ ] **Step 5: Commit** + +```bash +git add libs/render/ +git commit -m "perf(render): add element-level memoization via Object.is equality" +``` + +--- + +### Task 6: ParseTreeStore — Bridge Parse Tree to Spec Signal + +**Files:** +- Create: `libs/chat/src/lib/streaming/parse-tree-store.ts` +- Create: `libs/chat/src/lib/streaming/parse-tree-store.spec.ts` +- Modify: `libs/chat/src/public-api.ts` + +- [ ] **Step 1: Write failing tests** + +Create `libs/chat/src/lib/streaming/parse-tree-store.spec.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; +import { createParseTreeStore } from './parse-tree-store'; +import { createPartialJsonParser } from '@cacheplane/partial-json'; + +describe('createParseTreeStore', () => { + it('spec is null initially', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const parser = createPartialJsonParser(); + const store = createParseTreeStore(parser); + expect(store.spec()).toBeNull(); + }); + }); + + it('materializes a complete spec from streamed JSON', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const parser = createPartialJsonParser(); + const store = createParseTreeStore(parser); + + const json = '{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hello"}}}}'; + store.push(json); + + const spec = store.spec(); + expect(spec).not.toBeNull(); + expect(spec!.root).toBe('r1'); + expect(spec!.elements['r1'].type).toBe('Text'); + expect(spec!.elements['r1'].props!['label']).toBe('Hello'); + }); + }); + + it('updates spec incrementally as tokens stream', () => { + 
TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const parser = createPartialJsonParser(); + const store = createParseTreeStore(parser); + + store.push('{"root":"r1","elements":{"r1":{"type":"Te'); + const spec1 = store.spec(); + expect(spec1).not.toBeNull(); + expect(spec1!.elements['r1'].type).toBe('Te'); + + store.push('xt","props":{"label":"Hello"}}}}'); + const spec2 = store.spec(); + expect(spec2!.elements['r1'].type).toBe('Text'); + expect(spec2!.elements['r1'].props!['label']).toBe('Hello'); + }); + }); + + it('preserves structural sharing for unchanged elements', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const parser = createPartialJsonParser(); + const store = createParseTreeStore(parser); + + store.push('{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Done"}},"r2":{"type":"But'); + const spec1 = store.spec(); + const el1Ref = spec1!.elements['r1']; + + store.push('ton","props":{"label":"Click"}}}}'); + const spec2 = store.spec(); + + // r1 was complete before r2 started streaming — reference should be preserved + expect(spec2!.elements['r1']).toBe(el1Ref); + }); + }); + + it('tracks element accumulation states', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const parser = createPartialJsonParser(); + const store = createParseTreeStore(parser); + + store.push('{"root":"r1","elements":{"r1":{"type":"Text"'); + const states = store.elementStates(); + expect(states.get('r1')).toBeDefined(); + expect(states.get('r1')!.hasType).toBe(true); + expect(states.get('r1')!.hasProps).toBe(false); + + store.push(',"props":{"label":"Hi"}}}}'); + const states2 = store.elementStates(); + expect(states2.get('r1')!.hasType).toBe(true); + expect(states2.get('r1')!.hasProps).toBe(true); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `npx nx test chat -- --testPathPattern=parse-tree-store` +Expected: FAIL — 
`createParseTreeStore` not found + +- [ ] **Step 3: Implement ParseTreeStore** + +Create `libs/chat/src/lib/streaming/parse-tree-store.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { signal, type Signal } from '@angular/core'; +import type { Spec } from '@json-render/core'; +import type { PartialJsonParser, JsonObjectNode } from '@cacheplane/partial-json'; +import { materialize } from '@cacheplane/partial-json'; + +export interface ElementAccumulationState { + hasType: boolean; + hasProps: boolean; + hasChildren: boolean; + streaming: boolean; +} + +export interface ParseTreeStore { + /** Push characters to the parser and update signals. */ + push(chunk: string): void; + /** Current materialized spec (structurally shared between updates). */ + readonly spec: Signal<Spec | null>; + /** Per-element accumulation tracking. */ + readonly elementStates: Signal<Map<string, ElementAccumulationState>>; +} + +export function createParseTreeStore(parser: PartialJsonParser): ParseTreeStore { + const specSignal = signal<Spec | null>(null); + const elementStatesSignal = signal<Map<string, ElementAccumulationState>>(new Map()); + + function push(chunk: string): void { + const events = parser.push(chunk); + if (!parser.root || events.length === 0) return; + + // Materialize the full tree with structural sharing + const raw = materialize(parser.root); + if (raw && typeof raw === 'object' && !Array.isArray(raw)) { + specSignal.set(raw as unknown as Spec); + } + + // Update element accumulation states + updateElementStates(); + } + + function updateElementStates(): void { + if (!parser.root || parser.root.type !== 'object') return; + const rootObj = parser.root as JsonObjectNode; + const elementsNode = rootObj.children.get('elements'); + if (!elementsNode || elementsNode.type !== 'object') return; + + const states = new Map<string, ElementAccumulationState>(); + const elementsObj = elementsNode as JsonObjectNode; + + for (const [key, node] of 
elementsObj.children) { + if (node.type !== 'object') continue; + const elObj = node as JsonObjectNode; + + states.set(key, { + hasType: elObj.children.has('type'), + hasProps: elObj.children.has('props'), + hasChildren: elObj.children.has('children'), + streaming: node.status !== 'complete', + }); + } + + elementStatesSignal.set(states); + } + + return { + push, + spec: specSignal.asReadonly(), + elementStates: elementStatesSignal.asReadonly(), + }; +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `npx nx test chat -- --testPathPattern=parse-tree-store` +Expected: ALL PASS + +- [ ] **Step 5: Commit** + +```bash +git add libs/chat/src/lib/streaming/ +git commit -m "feat(chat): add ParseTreeStore bridging parse tree to Spec signals" +``` + +--- + +### Task 7: ContentClassifier — Content Type Detection and Routing + +**Files:** +- Create: `libs/chat/src/lib/streaming/content-classifier.ts` +- Create: `libs/chat/src/lib/streaming/content-classifier.spec.ts` + +- [ ] **Step 1: Write failing tests** + +Create `libs/chat/src/lib/streaming/content-classifier.spec.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; +import { createContentClassifier, type ContentClassifier } from './content-classifier'; + +describe('createContentClassifier', () => { + function setup(): ContentClassifier { + let classifier!: ContentClassifier; + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + classifier = createContentClassifier(); + }); + return classifier; + } + + describe('initial state', () => { + it('type is undetermined', () => { + const c = setup(); + expect(c.type()).toBe('undetermined'); + }); + + it('markdown is empty', () => { + const c = setup(); + expect(c.markdown()).toBe(''); + }); + + it('spec is null', () => { + const c = setup(); + expect(c.spec()).toBeNull(); + }); + }); + + describe('markdown detection', () 
=> { + it('detects plain text as markdown', () => { + const c = setup(); + c.update('Hello, world!'); + expect(c.type()).toBe('markdown'); + expect(c.markdown()).toBe('Hello, world!'); + }); + + it('accumulates markdown across updates', () => { + const c = setup(); + c.update('Hello'); + c.update('Hello, world'); + expect(c.markdown()).toBe('Hello, world'); + }); + }); + + describe('json-render detection', () => { + it('detects leading { as json-render', () => { + const c = setup(); + c.update('{"root":'); + expect(c.type()).toBe('json-render'); + }); + + it('detects { with leading whitespace', () => { + const c = setup(); + c.update(' \n{"root":'); + expect(c.type()).toBe('json-render'); + }); + + it('produces a spec from streamed JSON', () => { + const c = setup(); + c.update('{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hi"}}}}'); + expect(c.spec()).not.toBeNull(); + expect(c.spec()!.root).toBe('r1'); + expect(c.spec()!.elements['r1'].type).toBe('Text'); + }); + + it('streams spec incrementally', () => { + const c = setup(); + c.update('{"root":"r1","elements":{"r1":{"type":"Te'); + expect(c.spec()!.elements['r1'].type).toBe('Te'); + + c.update('{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hello"}}}}'); + expect(c.spec()!.elements['r1'].type).toBe('Text'); + }); + + it('markdown is empty for pure JSON', () => { + const c = setup(); + c.update('{"root":"r1"}'); + expect(c.markdown()).toBe(''); + }); + }); + + describe('delta processing', () => { + it('only processes new characters on each update', () => { + const c = setup(); + c.update('Hello'); + c.update('Hello, world'); + // Should not double-process "Hello" + expect(c.markdown()).toBe('Hello, world'); + }); + + it('handles empty delta gracefully', () => { + const c = setup(); + c.update('Hello'); + c.update('Hello'); // Same content — no delta + expect(c.markdown()).toBe('Hello'); + }); + }); + + describe('type transitions', () => { + it('type never downgrades', () => { + 
const c = setup(); + c.update('Hello'); + expect(c.type()).toBe('markdown'); + // Even if we could somehow see JSON later, type doesn't go back to undetermined + }); + }); + + describe('streaming state', () => { + it('streaming is true while content is arriving', () => { + const c = setup(); + c.update('{"root":"r1"'); + expect(c.streaming()).toBe(true); + }); + + it('streaming becomes false after complete JSON', () => { + const c = setup(); + c.update('{"root":"r1"}'); + // Parser has complete JSON + expect(c.streaming()).toBe(false); + }); + }); + + describe('dispose', () => { + it('can be called without errors', () => { + const c = setup(); + c.update('Hello'); + expect(() => c.dispose()).not.toThrow(); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `npx nx test chat -- --testPathPattern=content-classifier` +Expected: FAIL — `createContentClassifier` not found + +- [ ] **Step 3: Implement ContentClassifier** + +Create `libs/chat/src/lib/streaming/content-classifier.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { signal, type Signal } from '@angular/core'; +import type { Spec } from '@json-render/core'; +import { createPartialJsonParser } from '@cacheplane/partial-json'; +import { createParseTreeStore, type ElementAccumulationState } from './parse-tree-store'; + +export type ContentType = 'undetermined' | 'markdown' | 'json-render' | 'a2ui' | 'mixed'; + +export interface ContentClassifier { + /** Feed the full message content snapshot. Internally computes delta. */ + update(content: string): void; + + /** Reactive signals for classified output. 
*/
+  readonly type: Signal<ContentType>;
+  readonly markdown: Signal<string>;
+  readonly spec: Signal<Spec | null>;
+  readonly elementStates: Signal<Map<string, ElementAccumulationState>>;
+  readonly streaming: Signal<boolean>;
+
+  dispose(): void;
+}
+
+type DetectionState = 'undetermined' | 'markdown' | 'json-render-partial' | 'a2ui';
+
+export function createContentClassifier(): ContentClassifier {
+  const typeSignal = signal<ContentType>('undetermined');
+  const markdownSignal = signal<string>('');
+  const streamingSignal = signal<boolean>(true);
+
+  let processedLength = 0;
+  let detectionState: DetectionState = 'undetermined';
+
+  // Created eagerly (construction is cheap); only fed once JSON content is detected
+  const parser = createPartialJsonParser();
+  const store = createParseTreeStore(parser);
+  let jsonDetected = false;
+
+  function detect(content: string): void {
+    // Find first non-whitespace character
+    for (let i = 0; i < content.length; i++) {
+      const ch = content[i];
+      if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue;
+
+      if (ch === '{') {
+        detectionState = 'json-render-partial';
+        typeSignal.set('json-render');
+        jsonDetected = true;
+        // Feed everything from the start (including whitespace, the parser handles it)
+        store.push(content);
+        processedLength = content.length; if (parser.root && parser.root.status === 'complete') streamingSignal.set(false);
+        return;
+      }
+
+      // Check for A2UI delimiter
+      if (content.startsWith('---a2ui_JSON---', i)) {
+        detectionState = 'a2ui';
+        typeSignal.set('a2ui');
+        processedLength = content.length;
+        return;
+      }
+
+      // Any other character = markdown
+      detectionState = 'markdown';
+      typeSignal.set('markdown');
+      markdownSignal.set(content);
+      processedLength = content.length;
+      return;
+    }
+    // All whitespace so far — stay undetermined
+  }
+
+  function update(content: string): void {
+    if (content.length <= processedLength && detectionState !== 'undetermined') {
+      return;
+    }
+
+    if (detectionState === 'undetermined') {
+      detect(content);
+      return;
+    }
+
+    const delta = content.slice(processedLength);
+ if (!delta.length) return; + processedLength = content.length; + + switch (detectionState) { + case 'markdown': + markdownSignal.set(content); + break; + case 'json-render-partial': + store.push(delta); + // Check if parsing is complete + if (parser.root && parser.root.status === 'complete') { + streamingSignal.set(false); + } + break; + case 'a2ui': + // A2UI accumulation (future) + break; + } + } + + function dispose(): void { + // Clean up resources if needed + } + + return { + update, + type: typeSignal.asReadonly(), + markdown: markdownSignal.asReadonly(), + spec: store.spec, + elementStates: store.elementStates, + streaming: streamingSignal.asReadonly(), + dispose, + }; +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `npx nx test chat -- --testPathPattern=content-classifier` +Expected: ALL PASS + +- [ ] **Step 5: Commit** + +```bash +git add libs/chat/src/lib/streaming/ +git commit -m "feat(chat): add ContentClassifier for streaming content type detection" +``` + +--- + +### Task 8: Chat Component Integration + +**Files:** +- Modify: `libs/chat/src/lib/compositions/chat/chat.component.ts` +- Modify: `libs/chat/src/lib/compositions/chat/chat.component.spec.ts` +- Modify: `libs/chat/src/public-api.ts` + +**Context:** The current `ChatComponent` uses `AgentRef` (from `@cacheplane/angular`), has `views` input of type `ViewRegistry`, and `store` input of type `StateStore`. The AI message template uses `flex gap-3` with inline avatar (ChatGPT pattern, no "Assistant" label). `ChatGenerativeUiComponent` takes `AngularRegistry` — use `toRenderRegistry()` to convert from `ViewRegistry`. 
+ +- [ ] **Step 1: Write failing tests for classified rendering** + +Create or update `libs/chat/src/lib/compositions/chat/chat.component.spec.ts`: + +```ts +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; +import { createContentClassifier, type ContentClassifier } from '../../streaming/content-classifier'; + +describe('ChatComponent — content classification', () => { + it('classifyMessage creates a classifier on first call and caches it', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const classifiers = new Map<number, ContentClassifier>(); + + function classifyMessage(content: string, index: number): ContentClassifier { + let classifier = classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + + const c1 = classifyMessage('Hello', 0); + const c2 = classifyMessage('Hello, world', 0); + expect(c2).toBe(c1); // Same instance, cached + expect(c1.markdown()).toBe('Hello, world'); + }); + }); + + it('different message indices get different classifiers', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const classifiers = new Map<number, ContentClassifier>(); + + function classifyMessage(content: string, index: number): ContentClassifier { + let classifier = classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + + const c0 = classifyMessage('Hello', 0); + const c1 = classifyMessage('{"root":"r1"}', 1); + expect(c0.type()).toBe('markdown'); + expect(c1.type()).toBe('json-render'); + }); + }); + + it('markdown messages use the fast path (no spec)', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => 
{ + const c = createContentClassifier(); + c.update('Just plain markdown text'); + expect(c.type()).toBe('markdown'); + expect(c.spec()).toBeNull(); + expect(c.markdown()).toBe('Just plain markdown text'); + }); + }); + + it('JSON messages produce a spec and no markdown', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const c = createContentClassifier(); + c.update('{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hi"}}}}'); + expect(c.type()).toBe('json-render'); + expect(c.spec()).not.toBeNull(); + expect(c.markdown()).toBe(''); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they pass (these test the pattern, not the component template)** + +Run: `npx nx test chat -- --testPathPattern=chat.component` +Expected: PASS + +- [ ] **Step 3: Update ChatComponent template and logic** + +Modify `libs/chat/src/lib/compositions/chat/chat.component.ts`: + +Add imports at the top: + +```ts +import { ChatGenerativeUiComponent } from '../../primitives/chat-generative-ui/chat-generative-ui.component'; +import { toRenderRegistry } from '@cacheplane/render'; +import { createContentClassifier, type ContentClassifier } from '../../streaming/content-classifier'; +``` + +Add `ChatGenerativeUiComponent` to the `imports` array. + +Add to the component class: + +```ts + private readonly classifiers = new Map<number, ContentClassifier>(); + + /** Convert ViewRegistry → AngularRegistry for ChatGenerativeUiComponent. */ + readonly renderRegistry = computed(() => { + const v = this.views(); + return v ? 
toRenderRegistry(v) : undefined; + }); + + classifyMessage(content: string, index: number): ContentClassifier { + let classifier = this.classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + this.classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + + clearClassifiers(): void { + for (const [, c] of this.classifiers) { + c.dispose(); + } + this.classifiers.clear(); + } +``` + +Replace the AI message template (lines 112-125) with: + +```html + <!-- AI messages: classified rendering (markdown + generative UI) --> + <ng-template chatMessageTemplate="ai" let-message let-index="index"> + @let content = messageContent(message); + @let classified = classifyMessage(content, index); + <div class="flex gap-3"> + <div + class="w-7 h-7 flex items-center justify-center text-xs font-semibold shrink-0 mt-0.5" + style="background: var(--chat-avatar-bg); color: var(--chat-avatar-text); border-radius: var(--chat-radius-avatar);" + >A</div> + <div class="flex-1 min-w-0 flex flex-col gap-2"> + @if (classified.markdown(); as md) { + <div + class="chat-md break-words text-[length:var(--chat-font-size)] leading-[var(--chat-line-height)]" + style="color: var(--chat-text);" + [innerHTML]="renderMd(md)" + ></div> + } + + @if (classified.spec(); as spec) { + <chat-generative-ui + [spec]="spec" + [registry]="renderRegistry()" + [store]="store()" + [loading]="ref().isLoading()" + /> + } + </div> + </div> + </ng-template> +``` + +- [ ] **Step 4: Run all chat tests** + +Run: `npx nx test chat` +Expected: ALL PASS + +- [ ] **Step 5: Update public-api.ts exports** + +Add to `libs/chat/src/public-api.ts`: + +```ts +// Streaming / Generative UI +export { createContentClassifier } from './lib/streaming/content-classifier'; +export type { ContentClassifier, ContentType } from './lib/streaming/content-classifier'; +export { createParseTreeStore } from './lib/streaming/parse-tree-store'; +export type { ParseTreeStore, 
ElementAccumulationState } from './lib/streaming/parse-tree-store'; +``` + +- [ ] **Step 6: Run all tests across affected libs** + +Run: `npx nx run-many -t test -p partial-json render chat` +Expected: ALL PASS + +- [ ] **Step 7: Commit** + +```bash +git add libs/chat/ libs/partial-json/ +git commit -m "feat(chat): integrate content classifier and generative UI rendering" +``` + +--- + +### Task 9: Final Verification and Lint + +**Files:** +- All modified files + +- [ ] **Step 1: Run lint across all affected projects** + +Run: `npx nx run-many -t lint -p partial-json render chat` +Expected: PASS (fix any lint errors if found) + +- [ ] **Step 2: Run all tests one final time** + +Run: `npx nx run-many -t test -p partial-json render chat` +Expected: ALL PASS + +- [ ] **Step 3: Run build for all affected projects** + +Run: `npx nx run-many -t build -p partial-json render chat` +Expected: PASS + +- [ ] **Step 4: Commit any lint/build fixes** + +Only if needed: + +```bash +git add -A +git commit -m "fix: address lint and build issues" +``` From c94a973b0ac04dd8518bd63c69325dfd23a1d1e1 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 12:50:05 -0700 Subject: [PATCH 05/13] chore: scaffold @cacheplane/partial-json library Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- libs/partial-json/package.json | 6 ++++++ libs/partial-json/project.json | 27 +++++++++++++++++++++++++++ libs/partial-json/src/index.ts | 1 + libs/partial-json/tsconfig.json | 7 +++++++ libs/partial-json/tsconfig.lib.json | 9 +++++++++ libs/partial-json/vite.config.mts | 12 ++++++++++++ tsconfig.base.json | 3 ++- 7 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 libs/partial-json/package.json create mode 100644 libs/partial-json/project.json create mode 100644 libs/partial-json/src/index.ts create mode 100644 libs/partial-json/tsconfig.json create mode 100644 libs/partial-json/tsconfig.lib.json create mode 100644 
libs/partial-json/vite.config.mts diff --git a/libs/partial-json/package.json b/libs/partial-json/package.json new file mode 100644 index 000000000..1ead586fd --- /dev/null +++ b/libs/partial-json/package.json @@ -0,0 +1,6 @@ +{ + "name": "@cacheplane/partial-json", + "version": "0.0.1", + "license": "PolyForm-Noncommercial-1.0.0", + "sideEffects": false +} diff --git a/libs/partial-json/project.json b/libs/partial-json/project.json new file mode 100644 index 000000000..009be76c2 --- /dev/null +++ b/libs/partial-json/project.json @@ -0,0 +1,27 @@ +{ + "name": "partial-json", + "$schema": "../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "libs/partial-json/src", + "projectType": "library", + "tags": ["scope:shared", "type:lib"], + "targets": { + "build": { + "executor": "@nx/js:tsc", + "outputs": ["{workspaceRoot}/dist/libs/partial-json"], + "options": { + "outputPath": "dist/libs/partial-json", + "main": "libs/partial-json/src/index.ts", + "tsConfig": "libs/partial-json/tsconfig.lib.json" + } + }, + "lint": { + "executor": "@nx/eslint:lint" + }, + "test": { + "executor": "@nx/vite:test", + "options": { + "configFile": "libs/partial-json/vite.config.mts" + } + } + } +} diff --git a/libs/partial-json/src/index.ts b/libs/partial-json/src/index.ts new file mode 100644 index 000000000..9a0d4b13d --- /dev/null +++ b/libs/partial-json/src/index.ts @@ -0,0 +1 @@ +// Public API — populated as modules are added diff --git a/libs/partial-json/tsconfig.json b/libs/partial-json/tsconfig.json new file mode 100644 index 000000000..1645f314f --- /dev/null +++ b/libs/partial-json/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.base.json", + "files": [], + "references": [ + { "path": "./tsconfig.lib.json" } + ] +} diff --git a/libs/partial-json/tsconfig.lib.json b/libs/partial-json/tsconfig.lib.json new file mode 100644 index 000000000..643573425 --- /dev/null +++ b/libs/partial-json/tsconfig.lib.json @@ -0,0 +1,9 @@ +{ + "extends": 
"./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "declaration": true + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.spec.ts"] +} diff --git a/libs/partial-json/vite.config.mts b/libs/partial-json/vite.config.mts new file mode 100644 index 000000000..971c722be --- /dev/null +++ b/libs/partial-json/vite.config.mts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite'; +import { nxViteTsPaths } from '@nx/vite/plugins/nx-tsconfig-paths.plugin'; + +export default defineConfig({ + plugins: [nxViteTsPaths()], + test: { + environment: 'node', + globals: true, + include: ['src/**/*.spec.ts'], + passWithNoTests: true, + }, +}); diff --git a/tsconfig.base.json b/tsconfig.base.json index 1854863e7..22167ee00 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -25,7 +25,8 @@ ], "@cacheplane/angular": ["libs/agent/src/public-api.ts"], "@cacheplane/render": ["libs/render/src/public-api.ts"], - "@cacheplane/chat": ["libs/chat/src/public-api.ts"] + "@cacheplane/chat": ["libs/chat/src/public-api.ts"], + "@cacheplane/partial-json": ["libs/partial-json/src/index.ts"] }, "skipLibCheck": true, "strict": true, From 93d09a507023a58c10fa9050a0c025f523db92d3 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 12:50:32 -0700 Subject: [PATCH 06/13] feat(partial-json): add node types and parser interface Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- libs/partial-json/src/index.ts | 8 ++- libs/partial-json/src/lib/types.ts | 82 ++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 libs/partial-json/src/lib/types.ts diff --git a/libs/partial-json/src/index.ts b/libs/partial-json/src/index.ts index 9a0d4b13d..6a8683591 100644 --- a/libs/partial-json/src/index.ts +++ b/libs/partial-json/src/index.ts @@ -1 +1,7 @@ -// Public API — populated as modules are added +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +export type { + 
JsonNodeType, JsonNodeStatus, JsonNodeBase, + JsonObjectNode, JsonArrayNode, JsonStringNode, + JsonNumberNode, JsonBooleanNode, JsonNullNode, + JsonNode, ParseEvent, PartialJsonParser, +} from './lib/types'; diff --git a/libs/partial-json/src/lib/types.ts b/libs/partial-json/src/lib/types.ts new file mode 100644 index 000000000..f49f7e0d3 --- /dev/null +++ b/libs/partial-json/src/lib/types.ts @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 + +/** Kinds of JSON values a node can represent. */ +export type JsonNodeType = 'object' | 'array' | 'string' | 'number' | 'boolean' | 'null'; + +/** Parsing state of a node. */ +export type JsonNodeStatus = 'pending' | 'streaming' | 'complete'; + +/** Base shape shared by all nodes. */ +export interface JsonNodeBase { + /** Stable identity — assigned on creation, never changes. */ + readonly id: number; + /** What kind of JSON value this node represents. */ + readonly type: JsonNodeType; + /** Parsing state. */ + status: JsonNodeStatus; + /** Parent node (null for root). */ + parent: JsonNode | null; + /** Key in parent — string for object properties, number for array indices. */ + key: string | number | null; +} + +export interface JsonObjectNode extends JsonNodeBase { + readonly type: 'object'; + children: Map<string, JsonNode>; + /** Key currently being built (between quote open and colon). */ + pendingKey: string | null; +} + +export interface JsonArrayNode extends JsonNodeBase { + readonly type: 'array'; + children: JsonNode[]; +} + +export interface JsonStringNode extends JsonNodeBase { + readonly type: 'string'; + /** Grows character-by-character as tokens arrive. */ + value: string; +} + +export interface JsonNumberNode extends JsonNodeBase { + readonly type: 'number'; + /** Raw characters accumulated so far. */ + raw: string; + /** Parsed value — set when node completes. 
*/ + value: number | null; +} + +export interface JsonBooleanNode extends JsonNodeBase { + readonly type: 'boolean'; + value: boolean; +} + +export interface JsonNullNode extends JsonNodeBase { + readonly type: 'null'; +} + +export type JsonNode = + | JsonObjectNode + | JsonArrayNode + | JsonStringNode + | JsonNumberNode + | JsonBooleanNode + | JsonNullNode; + +/** Events emitted by the parser as the tree changes. */ +export interface ParseEvent { + type: 'node-created' | 'value-updated' | 'node-completed'; + node: JsonNode; + /** For value-updated on strings: the characters appended this push. */ + delta?: string; +} + +/** Push-based streaming JSON parser. */ +export interface PartialJsonParser { + /** Feed characters. Returns events for what changed. */ + push(chunk: string): ParseEvent[]; + /** Root node of the parse tree. */ + readonly root: JsonNode | null; + /** Look up a node by JSON Pointer path (e.g., "/elements/el-1/props"). */ + getByPath(path: string): JsonNode | null; +} From 7456a4e843b586c090d43da9dfccf18cd7e7fd5a Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 12:58:08 -0700 Subject: [PATCH 07/13] feat(partial-json): implement character-by-character streaming parser State-machine parser that processes JSON one character at a time, emitting node-created/value-updated/node-completed events as the tree grows. Supports strings, numbers, booleans, null, objects, arrays, escape sequences, unicode, nested structures, stable node identities, and JSON Pointer path lookup. 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- libs/partial-json/src/index.ts | 1 + libs/partial-json/src/lib/parser.spec.ts | 335 ++++++++++++++++ libs/partial-json/src/lib/parser.ts | 472 +++++++++++++++++++++++ 3 files changed, 808 insertions(+) create mode 100644 libs/partial-json/src/lib/parser.spec.ts create mode 100644 libs/partial-json/src/lib/parser.ts diff --git a/libs/partial-json/src/index.ts b/libs/partial-json/src/index.ts index 6a8683591..ae94ec330 100644 --- a/libs/partial-json/src/index.ts +++ b/libs/partial-json/src/index.ts @@ -5,3 +5,4 @@ export type { JsonNumberNode, JsonBooleanNode, JsonNullNode, JsonNode, ParseEvent, PartialJsonParser, } from './lib/types'; +export { createPartialJsonParser } from './lib/parser'; diff --git a/libs/partial-json/src/lib/parser.spec.ts b/libs/partial-json/src/lib/parser.spec.ts new file mode 100644 index 000000000..7a0540bf1 --- /dev/null +++ b/libs/partial-json/src/lib/parser.spec.ts @@ -0,0 +1,335 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { createPartialJsonParser } from './parser'; +import type { + JsonStringNode, + JsonNumberNode, + JsonBooleanNode, + JsonNullNode, + JsonObjectNode, + JsonArrayNode, + ParseEvent, +} from './types'; + +describe('createPartialJsonParser', () => { + describe('strings', () => { + it('should parse a complete string', () => { + const parser = createPartialJsonParser(); + const events = parser.push('"hello"'); + const root = parser.root as JsonStringNode; + expect(root).not.toBeNull(); + expect(root.type).toBe('string'); + expect(root.value).toBe('hello'); + expect(root.status).toBe('complete'); + }); + + it('should stream a string character-by-character', () => { + const parser = createPartialJsonParser(); + parser.push('"'); + parser.push('h'); + parser.push('e'); + parser.push('l'); + parser.push('l'); + parser.push('o'); + parser.push('"'); + const root = parser.root as 
JsonStringNode; + expect(root.type).toBe('string'); + expect(root.value).toBe('hello'); + expect(root.status).toBe('complete'); + }); + + it('should emit value-updated events with delta for strings', () => { + const parser = createPartialJsonParser(); + parser.push('"'); + const events1 = parser.push('he'); + const valueUpdates = events1.filter((e) => e.type === 'value-updated'); + expect(valueUpdates.length).toBeGreaterThan(0); + for (const ev of valueUpdates) { + expect(ev.delta).toBeDefined(); + } + parser.push('llo"'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('hello'); + }); + + it('should handle escaped characters (\\n, \\", \\\\)', () => { + const parser = createPartialJsonParser(); + parser.push('"line1\\nline2"'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('line1\nline2'); + }); + + it('should handle escaped double quote', () => { + const parser = createPartialJsonParser(); + parser.push('"say \\"hi\\""'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('say "hi"'); + }); + + it('should handle escaped backslash', () => { + const parser = createPartialJsonParser(); + parser.push('"a\\\\b"'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('a\\b'); + }); + + it('should handle unicode escapes (\\u0041 = A)', () => { + const parser = createPartialJsonParser(); + parser.push('"\\u0041"'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('A'); + }); + }); + + describe('numbers', () => { + it('should parse a complete integer in an array', () => { + const parser = createPartialJsonParser(); + parser.push('[42]'); + const root = parser.root as JsonArrayNode; + const num = root.children[0] as JsonNumberNode; + expect(num.type).toBe('number'); + expect(num.value).toBe(42); + expect(num.status).toBe('complete'); + }); + + it('should complete a number when followed by }', () => { + const parser = createPartialJsonParser(); + 
parser.push('{"a":123}'); + const root = parser.root as JsonObjectNode; + const num = root.children.get('a') as JsonNumberNode; + expect(num.type).toBe('number'); + expect(num.value).toBe(123); + expect(num.status).toBe('complete'); + }); + + it('should handle negative and decimal numbers', () => { + const parser = createPartialJsonParser(); + parser.push('[-3.14]'); + const root = parser.root as JsonArrayNode; + const num = root.children[0] as JsonNumberNode; + expect(num.value).toBe(-3.14); + expect(num.status).toBe('complete'); + }); + + it('should stream numbers at end of input', () => { + const parser = createPartialJsonParser(); + parser.push('[12'); + const root = parser.root as JsonArrayNode; + const num = root.children[0] as JsonNumberNode; + expect(num.type).toBe('number'); + expect(num.raw).toBe('12'); + expect(num.status).toBe('streaming'); + }); + }); + + describe('booleans and null', () => { + it('should parse true', () => { + const parser = createPartialJsonParser(); + parser.push('[true]'); + const root = parser.root as JsonArrayNode; + const node = root.children[0] as JsonBooleanNode; + expect(node.type).toBe('boolean'); + expect(node.value).toBe(true); + expect(node.status).toBe('complete'); + }); + + it('should parse false', () => { + const parser = createPartialJsonParser(); + parser.push('[false]'); + const root = parser.root as JsonArrayNode; + const node = root.children[0] as JsonBooleanNode; + expect(node.type).toBe('boolean'); + expect(node.value).toBe(false); + expect(node.status).toBe('complete'); + }); + + it('should parse null', () => { + const parser = createPartialJsonParser(); + parser.push('[null]'); + const root = parser.root as JsonArrayNode; + const node = root.children[0] as JsonNullNode; + expect(node.type).toBe('null'); + expect(node.status).toBe('complete'); + }); + + it('should handle partial keywords gracefully', () => { + const parser = createPartialJsonParser(); + parser.push('[tru'); + const root = parser.root as 
JsonArrayNode; + // Partial keyword should create a pending node + expect(root.children.length).toBe(1); + expect(root.children[0].status).toBe('pending'); + }); + }); + + describe('objects', () => { + it('should parse a simple object', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":"b"}'); + const root = parser.root as JsonObjectNode; + expect(root.type).toBe('object'); + expect(root.status).toBe('complete'); + const child = root.children.get('a') as JsonStringNode; + expect(child.value).toBe('b'); + expect(child.status).toBe('complete'); + }); + + it('should stream property values', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"Al'); + const root = parser.root as JsonObjectNode; + expect(root.type).toBe('object'); + expect(root.status).toBe('streaming'); + const child = root.children.get('name') as JsonStringNode; + expect(child.type).toBe('string'); + expect(child.value).toBe('Al'); + expect(child.status).toBe('streaming'); + }); + + it('should handle multiple properties', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":"1","b":"2"}'); + const root = parser.root as JsonObjectNode; + expect(root.children.size).toBe(2); + expect((root.children.get('a') as JsonStringNode).value).toBe('1'); + expect((root.children.get('b') as JsonStringNode).value).toBe('2'); + }); + + it('should handle nested objects', () => { + const parser = createPartialJsonParser(); + parser.push('{"outer":{"inner":"value"}}'); + const root = parser.root as JsonObjectNode; + const outer = root.children.get('outer') as JsonObjectNode; + expect(outer.type).toBe('object'); + const inner = outer.children.get('inner') as JsonStringNode; + expect(inner.value).toBe('value'); + }); + }); + + describe('arrays', () => { + it('should parse a simple array of numbers', () => { + const parser = createPartialJsonParser(); + parser.push('[1,2,3]'); + const root = parser.root as JsonArrayNode; + expect(root.type).toBe('array'); + 
expect(root.status).toBe('complete'); + expect(root.children.length).toBe(3); + expect((root.children[0] as JsonNumberNode).value).toBe(1); + expect((root.children[1] as JsonNumberNode).value).toBe(2); + expect((root.children[2] as JsonNumberNode).value).toBe(3); + }); + + it('should parse an array of strings', () => { + const parser = createPartialJsonParser(); + parser.push('["a","b","c"]'); + const root = parser.root as JsonArrayNode; + expect(root.children.length).toBe(3); + expect((root.children[0] as JsonStringNode).value).toBe('a'); + expect((root.children[1] as JsonStringNode).value).toBe('b'); + expect((root.children[2] as JsonStringNode).value).toBe('c'); + }); + + it('should parse nested arrays', () => { + const parser = createPartialJsonParser(); + parser.push('[[1,2],[3]]'); + const root = parser.root as JsonArrayNode; + expect(root.children.length).toBe(2); + const first = root.children[0] as JsonArrayNode; + expect(first.type).toBe('array'); + expect(first.children.length).toBe(2); + }); + }); + + describe('streaming complex structures', () => { + it('should build a spec-like structure token-by-token', () => { + const parser = createPartialJsonParser(); + const json = '{"type":"div","props":{"class":"main"},"children":[{"type":"span"}]}'; + // Feed one character at a time + for (const ch of json) { + parser.push(ch); + } + const root = parser.root as JsonObjectNode; + expect(root.type).toBe('object'); + expect(root.status).toBe('complete'); + const typeNode = root.children.get('type') as JsonStringNode; + expect(typeNode.value).toBe('div'); + const propsNode = root.children.get('props') as JsonObjectNode; + expect(propsNode.children.get('class')).toBeDefined(); + const childrenNode = root.children.get('children') as JsonArrayNode; + expect(childrenNode.children.length).toBe(1); + }); + + it('should maintain stable node identities across pushes', () => { + const parser = createPartialJsonParser(); + parser.push('{"name":"'); + const root1 = 
parser.root as JsonObjectNode; + const nameNode1 = root1.children.get('name') as JsonStringNode; + const id1 = nameNode1.id; + + parser.push('Al'); + const nameNode2 = (parser.root as JsonObjectNode).children.get( + 'name' + ) as JsonStringNode; + expect(nameNode2.id).toBe(id1); + expect(nameNode2.value).toBe('Al'); + + parser.push('ice"'); + const nameNode3 = (parser.root as JsonObjectNode).children.get( + 'name' + ) as JsonStringNode; + expect(nameNode3.id).toBe(id1); + expect(nameNode3.value).toBe('Alice'); + }); + }); + + describe('getByPath', () => { + it('should return root for empty path', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1}'); + expect(parser.getByPath('')).toBe(parser.root); + }); + + it('should look up object properties', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":{"b":"c"}}'); + const node = parser.getByPath('/a/b') as JsonStringNode; + expect(node).not.toBeNull(); + expect(node.type).toBe('string'); + expect(node.value).toBe('c'); + }); + + it('should look up array indices', () => { + const parser = createPartialJsonParser(); + parser.push('{"items":["x","y","z"]}'); + const node = parser.getByPath('/items/1') as JsonStringNode; + expect(node).not.toBeNull(); + expect(node.value).toBe('y'); + }); + + it('should return null for non-existent paths', () => { + const parser = createPartialJsonParser(); + parser.push('{"a":1}'); + expect(parser.getByPath('/b')).toBeNull(); + expect(parser.getByPath('/a/b')).toBeNull(); + }); + }); + + describe('whitespace', () => { + it('should handle whitespace between tokens', () => { + const parser = createPartialJsonParser(); + parser.push('{ "a" : "b" , "c" : "d" }'); + const root = parser.root as JsonObjectNode; + expect(root.type).toBe('object'); + expect(root.status).toBe('complete'); + expect((root.children.get('a') as JsonStringNode).value).toBe('b'); + expect((root.children.get('c') as JsonStringNode).value).toBe('d'); + }); + + it('should handle 
leading whitespace', () => { + const parser = createPartialJsonParser(); + parser.push(' "hello"'); + const root = parser.root as JsonStringNode; + expect(root.value).toBe('hello'); + }); + }); +}); diff --git a/libs/partial-json/src/lib/parser.ts b/libs/partial-json/src/lib/parser.ts new file mode 100644 index 000000000..8c91808e9 --- /dev/null +++ b/libs/partial-json/src/lib/parser.ts @@ -0,0 +1,472 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import type { + JsonNode, + JsonObjectNode, + JsonArrayNode, + JsonStringNode, + JsonNumberNode, + JsonBooleanNode, + JsonNullNode, + ParseEvent, + PartialJsonParser, +} from './types'; + +type State = + | 'EXPECT_VALUE' + | 'IN_STRING' + | 'IN_STRING_ESCAPE' + | 'IN_STRING_UNICODE' + | 'IN_NUMBER' + | 'IN_KEYWORD' + | 'EXPECT_KEY' + | 'IN_KEY_STRING' + | 'IN_KEY_STRING_ESCAPE' + | 'IN_KEY_STRING_UNICODE' + | 'EXPECT_COLON' + | 'AFTER_VALUE'; + +const ESCAPE_MAP: Record<string, string> = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', +}; + +const KEYWORDS: Record<string, { type: 'boolean' | 'null'; value: boolean | null }> = { + true: { type: 'boolean', value: true }, + false: { type: 'boolean', value: false }, + null: { type: 'null', value: null }, +}; + +export function createPartialJsonParser(): PartialJsonParser { + let nextId = 0; + let root: JsonNode | null = null; + let state: State = 'EXPECT_VALUE'; + let currentNode: JsonNode | null = null; + + // For string values + let stringNode: JsonStringNode | null = null; + + // For unicode escapes + let unicodeBuffer = ''; + let unicodeCount = 0; + + // For key strings in objects + let keyBuffer = ''; + let keyUnicodeBuffer = ''; + let keyUnicodeCount = 0; + + // For keywords (true, false, null) + let keywordBuffer = ''; + let keywordNode: JsonNode | null = null; + + // Stack of container nodes for nested structures + const containerStack: (JsonObjectNode | JsonArrayNode)[] = []; + + function makeId(): number 
{ + return nextId++; + } + + function createStringNode(parent: JsonNode | null, key: string | number | null): JsonStringNode { + return { + id: makeId(), + type: 'string', + status: 'streaming', + parent, + key, + value: '', + }; + } + + function createNumberNode(parent: JsonNode | null, key: string | number | null, firstChar: string): JsonNumberNode { + return { + id: makeId(), + type: 'number', + status: 'streaming', + parent, + key, + raw: firstChar, + value: null, + }; + } + + function createObjectNode(parent: JsonNode | null, key: string | number | null): JsonObjectNode { + return { + id: makeId(), + type: 'object', + status: 'streaming', + parent, + key, + children: new Map(), + pendingKey: null, + }; + } + + function createArrayNode(parent: JsonNode | null, key: string | number | null): JsonArrayNode { + return { + id: makeId(), + type: 'array', + status: 'streaming', + parent, + key, + children: [], + }; + } + + function currentContainer(): JsonObjectNode | JsonArrayNode | null { + return containerStack.length > 0 ? 
containerStack[containerStack.length - 1] : null; + } + + function getKeyForNewChild(): string | number | null { + const container = currentContainer(); + if (!container) return null; + if (container.type === 'object') { + return container.pendingKey; + } + return container.children.length; + } + + function attachChild(node: JsonNode): void { + const container = currentContainer(); + if (!container) { + root = node; + return; + } + if (container.type === 'object') { + const objContainer = container; + const key = objContainer.pendingKey!; + node.key = key; + node.parent = objContainer; + objContainer.children.set(key, node); + objContainer.pendingKey = null; + } else { + node.key = container.children.length; + node.parent = container; + container.children.push(node); + } + } + + function completeNumber(numNode: JsonNumberNode, events: ParseEvent[]): void { + numNode.value = Number(numNode.raw); + numNode.status = 'complete'; + events.push({ type: 'node-completed', node: numNode }); + } + + function completeKeyword(events: ParseEvent[]): void { + const kw = keywordBuffer; + const kwDef = KEYWORDS[kw]; + if (!kwDef) return; + + const node = keywordNode!; + if (kwDef.type === 'boolean') { + (node as JsonBooleanNode).value = kwDef.value as boolean; + } + node.status = 'complete'; + events.push({ type: 'node-completed', node }); + keywordBuffer = ''; + keywordNode = null; + } + + function push(chunk: string): ParseEvent[] { + const events: ParseEvent[] = []; + + for (let i = 0; i < chunk.length; i++) { + const ch = chunk[i]; + + switch (state) { + case 'EXPECT_VALUE': { + if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue; + if (ch === '"') { + const key = getKeyForNewChild(); + const node = createStringNode(currentContainer(), key); + attachChild(node); + if (!root) root = node; + stringNode = node; + events.push({ type: 'node-created', node }); + state = 'IN_STRING'; + } else if (ch === '{') { + const key = getKeyForNewChild(); + const node = 
createObjectNode(currentContainer(), key); + attachChild(node); + if (!root) root = node; + events.push({ type: 'node-created', node }); + containerStack.push(node); + state = 'EXPECT_KEY'; + } else if (ch === '[') { + const key = getKeyForNewChild(); + const node = createArrayNode(currentContainer(), key); + attachChild(node); + if (!root) root = node; + events.push({ type: 'node-created', node }); + containerStack.push(node); + state = 'EXPECT_VALUE'; + } else if (ch === ']') { + // Empty array close + const container = currentContainer(); + if (container && container.type === 'array') { + container.status = 'complete'; + containerStack.pop(); + events.push({ type: 'node-completed', node: container }); + state = containerStack.length > 0 ? 'AFTER_VALUE' : 'AFTER_VALUE'; + } + } else if (ch === '-' || (ch >= '0' && ch <= '9')) { + const key = getKeyForNewChild(); + const node = createNumberNode(currentContainer(), key, ch); + attachChild(node); + if (!root) root = node; + events.push({ type: 'node-created', node }); + currentNode = node; + state = 'IN_NUMBER'; + } else if (ch === 't' || ch === 'f' || ch === 'n') { + const key = getKeyForNewChild(); + let node: JsonNode; + if (ch === 'n') { + node = { + id: makeId(), + type: 'null', + status: 'pending', + parent: currentContainer(), + key, + } as JsonNullNode; + } else { + node = { + id: makeId(), + type: 'boolean', + status: 'pending', + parent: currentContainer(), + key, + value: ch === 't', + } as JsonBooleanNode; + } + attachChild(node); + if (!root) root = node; + events.push({ type: 'node-created', node }); + keywordBuffer = ch; + keywordNode = node; + state = 'IN_KEYWORD'; + } + break; + } + + case 'IN_STRING': { + if (ch === '\\') { + state = 'IN_STRING_ESCAPE'; + } else if (ch === '"') { + stringNode!.status = 'complete'; + events.push({ type: 'node-completed', node: stringNode! }); + stringNode = null; + state = containerStack.length > 0 ? 
'AFTER_VALUE' : 'AFTER_VALUE'; + } else { + stringNode!.value += ch; + events.push({ type: 'value-updated', node: stringNode!, delta: ch }); + } + break; + } + + case 'IN_STRING_ESCAPE': { + if (ch === 'u') { + unicodeBuffer = ''; + unicodeCount = 0; + state = 'IN_STRING_UNICODE'; + } else { + const mapped = ESCAPE_MAP[ch] ?? ch; + stringNode!.value += mapped; + events.push({ type: 'value-updated', node: stringNode!, delta: mapped }); + state = 'IN_STRING'; + } + break; + } + + case 'IN_STRING_UNICODE': { + unicodeBuffer += ch; + unicodeCount++; + if (unicodeCount === 4) { + const codePoint = parseInt(unicodeBuffer, 16); + const char = String.fromCharCode(codePoint); + stringNode!.value += char; + events.push({ type: 'value-updated', node: stringNode!, delta: char }); + unicodeBuffer = ''; + unicodeCount = 0; + state = 'IN_STRING'; + } + break; + } + + case 'IN_NUMBER': { + const numNode = currentNode as JsonNumberNode; + if ((ch >= '0' && ch <= '9') || ch === '.' || ch === 'e' || ch === 'E' || ch === '+' || ch === '-') { + numNode.raw += ch; + } else { + // Number terminated by this character + completeNumber(numNode, events); + currentNode = null; + // Re-process this character + if (ch === ',' || ch === ']' || ch === '}') { + state = 'AFTER_VALUE'; + i--; // reprocess + } else { + state = 'AFTER_VALUE'; + i--; // reprocess + } + } + break; + } + + case 'IN_KEYWORD': { + keywordBuffer += ch; + const possibleKeywords = Object.keys(KEYWORDS).filter((k) => k.startsWith(keywordBuffer)); + if (possibleKeywords.length === 0) { + // Not a valid keyword continuation - treat as terminator + state = 'AFTER_VALUE'; + i--; // reprocess + } else { + const exact = KEYWORDS[keywordBuffer]; + if (exact) { + completeKeyword(events); + state = containerStack.length > 0 ? 
'AFTER_VALUE' : 'AFTER_VALUE'; + } + // Otherwise still accumulating + } + break; + } + + case 'EXPECT_KEY': { + if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue; + if (ch === '"') { + keyBuffer = ''; + state = 'IN_KEY_STRING'; + } else if (ch === '}') { + // Empty object + const container = currentContainer(); + if (container && container.type === 'object') { + container.status = 'complete'; + containerStack.pop(); + events.push({ type: 'node-completed', node: container }); + state = containerStack.length > 0 ? 'AFTER_VALUE' : 'AFTER_VALUE'; + } + } + break; + } + + case 'IN_KEY_STRING': { + if (ch === '\\') { + state = 'IN_KEY_STRING_ESCAPE'; + } else if (ch === '"') { + const container = currentContainer() as JsonObjectNode; + container.pendingKey = keyBuffer; + state = 'EXPECT_COLON'; + } else { + keyBuffer += ch; + } + break; + } + + case 'IN_KEY_STRING_ESCAPE': { + if (ch === 'u') { + keyUnicodeBuffer = ''; + keyUnicodeCount = 0; + state = 'IN_KEY_STRING_UNICODE'; + } else { + const mapped = ESCAPE_MAP[ch] ?? 
ch; + keyBuffer += mapped; + state = 'IN_KEY_STRING'; + } + break; + } + + case 'IN_KEY_STRING_UNICODE': { + keyUnicodeBuffer += ch; + keyUnicodeCount++; + if (keyUnicodeCount === 4) { + const codePoint = parseInt(keyUnicodeBuffer, 16); + keyBuffer += String.fromCharCode(codePoint); + keyUnicodeBuffer = ''; + keyUnicodeCount = 0; + state = 'IN_KEY_STRING'; + } + break; + } + + case 'EXPECT_COLON': { + if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue; + if (ch === ':') { + state = 'EXPECT_VALUE'; + } + break; + } + + case 'AFTER_VALUE': { + if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue; + if (ch === ',') { + const container = currentContainer(); + if (container) { + if (container.type === 'object') { + state = 'EXPECT_KEY'; + } else { + state = 'EXPECT_VALUE'; + } + } + } else if (ch === '}') { + const container = currentContainer(); + if (container && container.type === 'object') { + container.status = 'complete'; + containerStack.pop(); + events.push({ type: 'node-completed', node: container }); + state = containerStack.length > 0 ? 'AFTER_VALUE' : 'AFTER_VALUE'; + } + } else if (ch === ']') { + const container = currentContainer(); + if (container && container.type === 'array') { + container.status = 'complete'; + containerStack.pop(); + events.push({ type: 'node-completed', node: container }); + state = containerStack.length > 0 ? 'AFTER_VALUE' : 'AFTER_VALUE'; + } + } + break; + } + } + } + + return events; + } + + function getByPath(path: string): JsonNode | null { + if (!root) return null; + if (path === '' || path === '/') return root; + + // Handle paths that don't start with / + const normalizedPath = path.startsWith('/') ? 
path : '/' + path; + const segments = normalizedPath.split('/').slice(1); // remove leading empty string + + let current: JsonNode = root; + for (const segment of segments) { + if (current.type === 'object') { + const child = (current as JsonObjectNode).children.get(segment); + if (!child) return null; + current = child; + } else if (current.type === 'array') { + const index = parseInt(segment, 10); + if (isNaN(index)) return null; + const child = (current as JsonArrayNode).children[index]; + if (!child) return null; + current = child; + } else { + return null; + } + } + return current; + } + + return { + push, + get root() { + return root; + }, + getByPath, + }; +} From 8718b2a91dff258188de6400458fb0effc24c95c Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:00:49 -0700 Subject: [PATCH 08/13] feat(partial-json): add materialization with structural sharing Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- libs/partial-json/src/index.ts | 1 + libs/partial-json/src/lib/materialize.spec.ts | 144 ++++++++++++++++++ libs/partial-json/src/lib/materialize.ts | 97 ++++++++++++ 3 files changed, 242 insertions(+) create mode 100644 libs/partial-json/src/lib/materialize.spec.ts create mode 100644 libs/partial-json/src/lib/materialize.ts diff --git a/libs/partial-json/src/index.ts b/libs/partial-json/src/index.ts index ae94ec330..4f15afd08 100644 --- a/libs/partial-json/src/index.ts +++ b/libs/partial-json/src/index.ts @@ -6,3 +6,4 @@ export type { JsonNode, ParseEvent, PartialJsonParser, } from './lib/types'; export { createPartialJsonParser } from './lib/parser'; +export { materialize } from './lib/materialize'; diff --git a/libs/partial-json/src/lib/materialize.spec.ts b/libs/partial-json/src/lib/materialize.spec.ts new file mode 100644 index 000000000..7075ae9de --- /dev/null +++ b/libs/partial-json/src/lib/materialize.spec.ts @@ -0,0 +1,144 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import 
{ describe, it, expect } from 'vitest'; +import { createPartialJsonParser } from './parser'; +import { materialize } from './materialize'; +import type { JsonArrayNode, JsonObjectNode, JsonStringNode } from './types'; + +describe('materialize', () => { + describe('basic materialization', () => { + it('should materialize a string node', () => { + const parser = createPartialJsonParser(); + parser.push('"hello"'); + expect(materialize(parser.root!)).toBe('hello'); + }); + + it('should materialize a number node inside an array', () => { + const parser = createPartialJsonParser(); + parser.push('[42]'); + const result = materialize(parser.root!) as unknown[]; + expect(result).toEqual([42]); + }); + + it('should materialize a boolean true', () => { + const parser = createPartialJsonParser(); + parser.push('[true]'); + const result = materialize(parser.root!) as unknown[]; + expect(result).toEqual([true]); + }); + + it('should materialize a boolean false', () => { + const parser = createPartialJsonParser(); + parser.push('[false]'); + const result = materialize(parser.root!) as unknown[]; + expect(result).toEqual([false]); + }); + + it('should materialize null', () => { + const parser = createPartialJsonParser(); + parser.push('[null]'); + const result = materialize(parser.root!) as unknown[]; + expect(result).toEqual([null]); + }); + + it('should materialize a simple object', () => { + const parser = createPartialJsonParser(); + parser.push('{"a": 1, "b": "two"}'); + const result = materialize(parser.root!) as Record<string, unknown>; + expect(result).toEqual({ a: 1, b: 'two' }); + }); + + it('should materialize an array', () => { + const parser = createPartialJsonParser(); + parser.push('[1, 2, 3]'); + const result = materialize(parser.root!) 
as unknown[]; + expect(result).toEqual([1, 2, 3]); + }); + + it('should materialize nested structures', () => { + const parser = createPartialJsonParser(); + parser.push('{"items": [{"name": "a"}, {"name": "b"}]}'); + const result = materialize(parser.root!) as Record<string, unknown>; + expect(result).toEqual({ items: [{ name: 'a' }, { name: 'b' }] }); + }); + + it('should materialize a partial streaming string', () => { + const parser = createPartialJsonParser(); + parser.push('{"msg": "hel'); + const result = materialize(parser.root!) as Record<string, unknown>; + expect(result).toEqual({ msg: 'hel' }); + }); + + it('should materialize a partial streaming number with best-effort', () => { + const parser = createPartialJsonParser(); + parser.push('[12'); + // Number is still streaming (not terminated), so best-effort parse + const result = materialize(parser.root!) as unknown[]; + expect(result).toEqual([12]); + }); + }); + + describe('structural sharing', () => { + it('should return the same reference for unchanged subtrees', () => { + const parser = createPartialJsonParser(); + parser.push('{"a": {"x": 1}, "b": "hel'); + + const result1 = materialize(parser.root!) as Record<string, unknown>; + const aRef1 = result1['a']; + + // Stream more into b + parser.push('lo"'); + const result2 = materialize(parser.root!) as Record<string, unknown>; + const aRef2 = result2['a']; + + // a subtree didn't change, should be same reference + expect(aRef2).toBe(aRef1); + // b did change + expect(result2['b']).toBe('hello'); + }); + + it('should preserve sibling references when one property changes', () => { + const parser = createPartialJsonParser(); + parser.push('{"x": [1, 2], "y": [3, 4], "z": "stream'); + + const result1 = materialize(parser.root!) as Record<string, unknown>; + const xRef1 = result1['x']; + const yRef1 = result1['y']; + + // Only z changes + parser.push('ing"'); + const result2 = materialize(parser.root!) 
as Record<string, unknown>; + + expect(result2['x']).toBe(xRef1); + expect(result2['y']).toBe(yRef1); + expect(result2['z']).toBe('streaming'); + }); + + it('should return the same reference when nothing changed between calls', () => { + const parser = createPartialJsonParser(); + parser.push('{"a": 1, "b": 2}'); + + const result1 = materialize(parser.root!); + const result2 = materialize(parser.root!); + + expect(result2).toBe(result1); + }); + + it('should detect changes in nested arrays', () => { + const parser = createPartialJsonParser(); + parser.push('{"items": [1'); + + const result1 = materialize(parser.root!) as Record<string, unknown>; + const items1 = result1['items'] as unknown[]; + expect(items1).toEqual([1]); + + // Add another item + parser.push(', 2]'); + const result2 = materialize(parser.root!) as Record<string, unknown>; + const items2 = result2['items'] as unknown[]; + expect(items2).toEqual([1, 2]); + + // items array changed (new child), so different reference + expect(items2).not.toBe(items1); + }); + }); +}); diff --git a/libs/partial-json/src/lib/materialize.ts b/libs/partial-json/src/lib/materialize.ts new file mode 100644 index 000000000..dd3bda7c3 --- /dev/null +++ b/libs/partial-json/src/lib/materialize.ts @@ -0,0 +1,97 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import type { JsonNode, JsonObjectNode, JsonArrayNode } from './types'; + +/** + * Cache entry storing the last materialized value and a version fingerprint + * used to detect whether a subtree has changed. + */ +interface CacheEntry { + version: string; + value: unknown; +} + +const cache = new WeakMap<JsonNode, CacheEntry>(); + +/** + * Compute a lightweight version fingerprint for a node. + * The fingerprint captures enough state to detect any change in the + * subtree rooted at this node. 
+ */ +function computeVersion(node: JsonNode): string { + switch (node.type) { + case 'string': + return `s:${node.status}:${node.value}`; + case 'number': + return `n:${node.status}:${node.raw}`; + case 'boolean': + return `b:${node.value}`; + case 'null': + return 'null'; + case 'object': { + const obj = node as JsonObjectNode; + const parts: string[] = [`o:${obj.status}:${obj.children.size}`]; + for (const [key, child] of obj.children) { + parts.push(`${key}=${computeVersion(child)}`); + } + return parts.join('|'); + } + case 'array': { + const arr = node as JsonArrayNode; + const parts: string[] = [`a:${arr.status}:${arr.children.length}`]; + for (const child of arr.children) { + parts.push(computeVersion(child)); + } + return parts.join('|'); + } + } +} + +/** + * Convert a parse-tree node to a plain JS value. + * + * Structural sharing: unchanged subtrees return the exact same object + * reference across consecutive calls, enabling cheap `===` checks + * in downstream consumers (e.g. Angular signals, React memos). + */ +export function materialize(node: JsonNode): unknown { + const version = computeVersion(node); + const cached = cache.get(node); + if (cached && cached.version === version) { + return cached.value; + } + + let value: unknown; + + switch (node.type) { + case 'string': + value = node.value; + break; + case 'number': + // Complete numbers have a parsed value; streaming ones get best-effort + value = node.value !== null ? 
node.value : Number(node.raw); + break; + case 'boolean': + value = node.value; + break; + case 'null': + value = null; + break; + case 'object': { + const obj = node as JsonObjectNode; + const result: Record<string, unknown> = {}; + for (const [key, child] of obj.children) { + result[key] = materialize(child); + } + value = result; + break; + } + case 'array': { + const arr = node as JsonArrayNode; + value = arr.children.map((child) => materialize(child)); + break; + } + } + + cache.set(node, { version, value }); + return value; +} From 31d2ed6417e4c6b8a538982715cb20b4a3c21356 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:07:25 -0700 Subject: [PATCH 09/13] perf(render): add element-level memoization via Object.is equality When the spec signal updates (e.g., from streaming tokens), unchanged elements keep the same object reference via structural sharing. The computed() equality check now uses Object.is so downstream computeds (componentClass, visible, resolvedInputs) skip re-evaluation for elements whose reference didn't change. 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- .../src/lib/render-element.component.spec.ts | 46 +++++++++++++++++++ .../src/lib/render-element.component.ts | 11 ++--- 2 files changed, 51 insertions(+), 6 deletions(-) diff --git a/libs/render/src/lib/render-element.component.spec.ts b/libs/render/src/lib/render-element.component.spec.ts index a5a669efa..367fa459c 100644 --- a/libs/render/src/lib/render-element.component.spec.ts +++ b/libs/render/src/lib/render-element.component.spec.ts @@ -326,3 +326,49 @@ describe('RenderElementComponent — children rendering', () => { expect(childKeys).toEqual([]); }); }); + +describe('RenderElementComponent — element-level memoization', () => { + it('element lookup returns same reference when spec changes but element is unchanged', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const { signal, computed } = require('@angular/core'); + + // Simulate two spec snapshots with structural sharing: + // el-1 is the same reference in both, el-2 is different + const sharedEl1 = { type: 'Text', props: { label: 'Same' } }; + const spec1 = createSpec({ + root: { type: 'Container', props: {}, children: ['el-1', 'el-2'] }, + 'el-1': sharedEl1, + 'el-2': { type: 'Text', props: { label: 'Old' } }, + }); + + // spec2 reuses the same el-1 reference (structural sharing) + const spec2 = { + ...spec1, + elements: { + ...spec1.elements, + 'el-2': { type: 'Text', props: { label: 'New' } }, + }, + } as Spec; + // el-1 is the SAME object reference + expect(spec2.elements['el-1']).toBe(spec1.elements['el-1']); + + // Simulate what the component does: computed with Object.is equality + const specSignal = signal(spec1); + const elementKey = signal('el-1'); + const element = computed( + () => specSignal()?.elements?.[elementKey()], + { equal: Object.is }, + ); + + const ref1 = element(); + expect(ref1).toBe(sharedEl1); + + // Update spec — el-1 reference unchanged due to structural sharing + 
specSignal.set(spec2); + const ref2 = element(); + // With Object.is equality, computed returns same reference + expect(ref2).toBe(ref1); + }); + }); +}); diff --git a/libs/render/src/lib/render-element.component.ts b/libs/render/src/lib/render-element.component.ts index ebeb4e79b..2626b6ef0 100644 --- a/libs/render/src/lib/render-element.component.ts +++ b/libs/render/src/lib/render-element.component.ts @@ -64,12 +64,11 @@ export class RenderElementComponent { private readonly repeatScope = inject(REPEAT_SCOPE, { optional: true }); readonly parentInjector = inject(Injector); - /** The UIElement definition from the spec. */ - readonly element: Signal<UIElement | undefined> = computed(() => { - const spec = this.spec(); - const key = this.elementKey(); - return spec?.elements?.[key]; - }); + /** The UIElement definition from the spec. Only propagates when reference changes. */ + readonly element: Signal<UIElement | undefined> = computed( + () => this.spec()?.elements?.[this.elementKey()], + { equal: Object.is }, + ); /** The Angular component class for this element type. */ readonly componentClass = computed<AngularComponentRenderer | null>(() => { From b6691a8f91d68dce4970297579f8a497f8d8167a Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:11:18 -0700 Subject: [PATCH 10/13] feat(chat): add ParseTreeStore for streaming JSON-to-spec materialization Bridges @cacheplane/partial-json parser with Angular signals, providing incremental spec materialization with structural sharing and per-element accumulation state tracking (hasType, hasProps, hasChildren, streaming). 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- .../lib/streaming/parse-tree-store.spec.ts | 124 ++++++++++++++++++ .../src/lib/streaming/parse-tree-store.ts | 70 ++++++++++ 2 files changed, 194 insertions(+) create mode 100644 libs/chat/src/lib/streaming/parse-tree-store.spec.ts create mode 100644 libs/chat/src/lib/streaming/parse-tree-store.ts diff --git a/libs/chat/src/lib/streaming/parse-tree-store.spec.ts b/libs/chat/src/lib/streaming/parse-tree-store.spec.ts new file mode 100644 index 000000000..1df8f768b --- /dev/null +++ b/libs/chat/src/lib/streaming/parse-tree-store.spec.ts @@ -0,0 +1,124 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; +import { createPartialJsonParser } from '@cacheplane/partial-json'; +import type { Spec } from '@json-render/core'; +import { createParseTreeStore } from './parse-tree-store'; + +describe('ParseTreeStore', () => { + function setup() { + const parser = createPartialJsonParser(); + let store!: ReturnType<typeof createParseTreeStore>; + TestBed.runInInjectionContext(() => { + store = createParseTreeStore(parser); + }); + return { parser, store }; + } + + it('spec is null initially', () => { + const { store } = setup(); + expect(store.spec()).toBeNull(); + }); + + it('elementStates is empty initially', () => { + const { store } = setup(); + expect(store.elementStates().size).toBe(0); + }); + + it('materializes a complete spec from streamed JSON', () => { + const { store } = setup(); + const json = JSON.stringify({ + root: 'el-1', + elements: { + 'el-1': { type: 'card', props: { title: 'Hello' }, children: ['el-2'] }, + 'el-2': { type: 'text', props: { content: 'World' } }, + }, + }); + store.push(json); + const spec = store.spec() as Spec; + expect(spec).not.toBeNull(); + expect(spec.root).toBe('el-1'); + expect(spec.elements['el-1'].type).toBe('card'); + 
expect(spec.elements['el-2'].type).toBe('text'); + }); + + it('updates spec incrementally as tokens stream', () => { + const { store } = setup(); + const json = '{"root":"el-1","elements":{"el-1":{"type":"card","props":{"title":"Hi"}}}}'; + + // Feed partial chunks + store.push('{"root":'); + const spec1 = store.spec(); + expect(spec1).not.toBeNull(); + + store.push('"el-1","elements":{'); + const spec2 = store.spec(); + expect(spec2).not.toBeNull(); + expect((spec2 as any).root).toBe('el-1'); + + store.push('"el-1":{"type":"card","props":{"title":"Hi"}}}}'); + const spec3 = store.spec() as Spec; + expect(spec3.root).toBe('el-1'); + expect(spec3.elements['el-1'].type).toBe('card'); + }); + + it('preserves structural sharing for unchanged elements', () => { + const { store } = setup(); + + // Push a complete first element + store.push('{"root":"el-1","elements":{"el-1":{"type":"card","props":{"title":"A"}}'); + const spec1 = store.spec() as any; + const el1Ref = spec1?.elements?.['el-1']; + + // Push more data that doesn't change el-1 + store.push(',"el-2":{"type":"text","props":{"content":"B"}}}}'); + const spec2 = store.spec() as any; + + // el-1 subtree should be structurally shared (same reference) + expect(spec2.elements['el-1']).toBe(el1Ref); + }); + + it('tracks element accumulation states for hasType and hasProps', () => { + const { store } = setup(); + + store.push('{"root":"el-1","elements":{"el-1":{"type":"card"'); + let states = store.elementStates(); + const el1State = states.get('el-1'); + expect(el1State).toBeDefined(); + expect(el1State!.hasType).toBe(true); + expect(el1State!.hasProps).toBe(false); + + store.push(',"props":{"title":"Hi"}}}}'); + states = store.elementStates(); + const el1StateAfter = states.get('el-1'); + expect(el1StateAfter!.hasType).toBe(true); + expect(el1StateAfter!.hasProps).toBe(true); + }); + + it('tracks hasChildren state', () => { + const { store } = setup(); + + 
store.push('{"root":"el-1","elements":{"el-1":{"type":"card","props":{}'); + let states = store.elementStates(); + expect(states.get('el-1')?.hasChildren).toBe(false); + + store.push(',"children":["el-2"]}}}'); + states = store.elementStates(); + expect(states.get('el-1')?.hasChildren).toBe(true); + }); + + it('tracks streaming state per element', () => { + const { store } = setup(); + + // Start streaming an element + store.push('{"root":"el-1","elements":{"el-1":{"type":"card"'); + let states = store.elementStates(); + expect(states.get('el-1')?.streaming).toBe(true); + + // Complete the element + store.push(',"props":{}}}}'); + states = store.elementStates(); + // After closing brace the element object is complete + expect(states.get('el-1')?.streaming).toBe(false); + }); +}); diff --git a/libs/chat/src/lib/streaming/parse-tree-store.ts b/libs/chat/src/lib/streaming/parse-tree-store.ts new file mode 100644 index 000000000..db2fc71ae --- /dev/null +++ b/libs/chat/src/lib/streaming/parse-tree-store.ts @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { signal, type Signal } from '@angular/core'; +import type { Spec } from '@json-render/core'; +import type { PartialJsonParser, JsonObjectNode } from '@cacheplane/partial-json'; +import { materialize } from '@cacheplane/partial-json'; + +export interface ElementAccumulationState { + hasType: boolean; + hasProps: boolean; + hasChildren: boolean; + streaming: boolean; +} + +export interface ParseTreeStore { + push(chunk: string): void; + readonly spec: Signal<Spec | null>; + readonly elementStates: Signal<Map<string, ElementAccumulationState>>; +} + +export function createParseTreeStore(parser: PartialJsonParser): ParseTreeStore { + const specSignal = signal<Spec | null>(null); + const elementStatesSignal = signal<Map<string, ElementAccumulationState>>(new Map()); + + function computeElementStates(materialized: any): Map<string, ElementAccumulationState> { + const states = new 
Map<string, ElementAccumulationState>(); + if (!materialized || typeof materialized !== 'object' || !materialized.elements) { + return states; + } + + const elements = materialized.elements as Record<string, any>; + // We also need the parse tree to know if the element object node is still streaming + const elementsNode = parser.getByPath('/elements') as JsonObjectNode | null; + + for (const [key, el] of Object.entries(elements)) { + if (!el || typeof el !== 'object') continue; + + let streaming = true; + if (elementsNode) { + const elNode = elementsNode.children.get(key); + if (elNode && elNode.status === 'complete') { + streaming = false; + } + } + + states.set(key, { + hasType: 'type' in el && el.type !== undefined, + hasProps: 'props' in el && el.props !== undefined, + hasChildren: 'children' in el && el.children !== undefined, + streaming, + }); + } + + return states; + } + + function push(chunk: string): void { + parser.push(chunk); + if (parser.root) { + const materialized = materialize(parser.root); + specSignal.set(materialized as Spec); + elementStatesSignal.set(computeElementStates(materialized)); + } + } + + return { + push, + spec: specSignal.asReadonly(), + elementStates: elementStatesSignal.asReadonly(), + }; +} From 2851bba5a7989365311be60e020b251f13e32011 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:11:27 -0700 Subject: [PATCH 11/13] feat(chat): add ContentClassifier for streaming content type detection Classifies incoming message content as markdown, json-render, or a2ui based on first non-whitespace character. Internally creates a ParseTreeStore for JSON content, computing deltas from full snapshots and exposing spec/elementStates/streaming signals. Type can upgrade (undetermined -> markdown/json-render) but never downgrade. 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- .../lib/streaming/content-classifier.spec.ts | 193 ++++++++++++++++++ .../src/lib/streaming/content-classifier.ts | 156 ++++++++++++++ 2 files changed, 349 insertions(+) create mode 100644 libs/chat/src/lib/streaming/content-classifier.spec.ts create mode 100644 libs/chat/src/lib/streaming/content-classifier.ts diff --git a/libs/chat/src/lib/streaming/content-classifier.spec.ts b/libs/chat/src/lib/streaming/content-classifier.spec.ts new file mode 100644 index 000000000..68f99f34b --- /dev/null +++ b/libs/chat/src/lib/streaming/content-classifier.spec.ts @@ -0,0 +1,193 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; +import type { Spec } from '@json-render/core'; +import { createContentClassifier, type ContentClassifier } from './content-classifier'; + +describe('ContentClassifier', () => { + function setup(): ContentClassifier { + let classifier!: ContentClassifier; + TestBed.runInInjectionContext(() => { + classifier = createContentClassifier(); + }); + return classifier; + } + + describe('initial state', () => { + it('type is undetermined', () => { + const c = setup(); + expect(c.type()).toBe('undetermined'); + }); + + it('markdown is empty', () => { + const c = setup(); + expect(c.markdown()).toBe(''); + }); + + it('spec is null', () => { + const c = setup(); + expect(c.spec()).toBeNull(); + }); + + it('elementStates is empty', () => { + const c = setup(); + expect(c.elementStates().size).toBe(0); + }); + + it('streaming is false', () => { + const c = setup(); + expect(c.streaming()).toBe(false); + }); + }); + + describe('markdown detection', () => { + it('detects plain text as markdown', () => { + const c = setup(); + c.update('Hello world'); + expect(c.type()).toBe('markdown'); + expect(c.markdown()).toBe('Hello world'); + }); + + it('accumulates markdown across updates', () => 
{ + const c = setup(); + c.update('Hello'); + c.update('Hello world'); + expect(c.markdown()).toBe('Hello world'); + }); + + it('spec remains null for markdown', () => { + const c = setup(); + c.update('# Some heading\nSome text'); + expect(c.spec()).toBeNull(); + }); + }); + + describe('json-render detection', () => { + it('detects leading { as json-render', () => { + const c = setup(); + c.update('{'); + expect(c.type()).toBe('json-render'); + }); + + it('produces spec from streamed JSON', () => { + const c = setup(); + const json = JSON.stringify({ + root: 'el-1', + elements: { + 'el-1': { type: 'card', props: { title: 'Hello' } }, + }, + }); + c.update(json); + const spec = c.spec() as Spec; + expect(spec).not.toBeNull(); + expect(spec.root).toBe('el-1'); + expect(spec.elements['el-1'].type).toBe('card'); + }); + + it('streams incrementally', () => { + const c = setup(); + c.update('{"root":'); + expect(c.type()).toBe('json-render'); + expect(c.spec()).not.toBeNull(); + + c.update('{"root":"el-1","elements":{}}'); + const spec = c.spec() as Spec; + expect(spec.root).toBe('el-1'); + }); + + it('markdown is empty for pure JSON', () => { + const c = setup(); + c.update('{"root":"el-1","elements":{}}'); + expect(c.markdown()).toBe(''); + }); + }); + + describe('delta processing', () => { + it('only processes new characters', () => { + const c = setup(); + c.update('{"root":'); + const spec1 = c.spec(); + + // Same content — no delta to process + c.update('{"root":'); + const spec2 = c.spec(); + + // Same reference since nothing changed + expect(spec2).toBe(spec1); + }); + + it('handles empty delta (same content twice)', () => { + const c = setup(); + c.update('Hello'); + c.update('Hello'); + expect(c.markdown()).toBe('Hello'); + expect(c.type()).toBe('markdown'); + }); + }); + + describe('type transitions', () => { + it('never downgrades from markdown', () => { + const c = setup(); + c.update('Hello'); + expect(c.type()).toBe('markdown'); + + // Even if subsequent 
content looks like JSON, type doesn't downgrade + c.update('Hello {"root":"el-1"}'); + expect(c.type()).not.toBe('json-render'); + }); + + it('never downgrades from json-render', () => { + const c = setup(); + c.update('{"root":"el-1"}'); + expect(c.type()).toBe('json-render'); + + // Stays json-render even with more content + c.update('{"root":"el-1","elements":{}}'); + expect(c.type()).toBe('json-render'); + }); + }); + + describe('streaming state', () => { + it('is true while content is arriving for json-render', () => { + const c = setup(); + c.update('{"root":'); + expect(c.streaming()).toBe(true); + }); + + it('is false after complete JSON', () => { + const c = setup(); + c.update('{"root":"el-1","elements":{}}'); + expect(c.streaming()).toBe(false); + }); + + it('transitions from true to false when JSON completes', () => { + const c = setup(); + c.update('{"root":"el-1"'); + expect(c.streaming()).toBe(true); + + c.update('{"root":"el-1","elements":{}}'); + expect(c.streaming()).toBe(false); + }); + }); + + describe('a2ui detection', () => { + it('detects ---a2ui_JSON--- prefix', () => { + const c = setup(); + c.update('---a2ui_JSON---{"root":"el-1","elements":{}}'); + expect(c.type()).toBe('a2ui'); + }); + }); + + describe('dispose', () => { + it('can be called without errors', () => { + const c = setup(); + c.update('Hello'); + expect(() => c.dispose()).not.toThrow(); + }); + + it('can be called on fresh instance', () => { + const c = setup(); + expect(() => c.dispose()).not.toThrow(); + }); + }); +}); diff --git a/libs/chat/src/lib/streaming/content-classifier.ts b/libs/chat/src/lib/streaming/content-classifier.ts new file mode 100644 index 000000000..5feccf3bf --- /dev/null +++ b/libs/chat/src/lib/streaming/content-classifier.ts @@ -0,0 +1,156 @@ +// SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 +import { signal, type Signal } from '@angular/core'; +import type { Spec } from '@json-render/core'; +import { createPartialJsonParser } from 
'@cacheplane/partial-json'; +import { createParseTreeStore, type ElementAccumulationState, type ParseTreeStore } from './parse-tree-store'; + +export type ContentType = 'undetermined' | 'markdown' | 'json-render' | 'a2ui' | 'mixed'; + +const A2UI_PREFIX = '---a2ui_JSON---'; + +export interface ContentClassifier { + update(content: string): void; + readonly type: Signal<ContentType>; + readonly markdown: Signal<string>; + readonly spec: Signal<Spec | null>; + readonly elementStates: Signal<Map<string, ElementAccumulationState>>; + readonly streaming: Signal<boolean>; + dispose(): void; +} + +export function createContentClassifier(): ContentClassifier { + const typeSignal = signal<ContentType>('undetermined'); + const markdownSignal = signal<string>(''); + const specSignal = signal<Spec | null>(null); + const elementStatesSignal = signal<Map<string, ElementAccumulationState>>(new Map()); + const streamingSignal = signal<boolean>(false); + + let processedLength = 0; + let store: ParseTreeStore | null = null; + let jsonStartIndex = 0; + + function detectType(content: string): ContentType { + // Find first non-whitespace character + for (let i = 0; i < content.length; i++) { + const ch = content[i]; + if (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') continue; + + if (content.startsWith(A2UI_PREFIX, i)) { + return 'a2ui'; + } + if (ch === '{') { + return 'json-render'; + } + return 'markdown'; + } + return 'undetermined'; + } + + function initJsonStore(jsonContent: string): void { + const parser = createPartialJsonParser(); + store = createParseTreeStore(parser); + if (jsonContent.length > 0) { + store.push(jsonContent); + } + syncJsonSignals(); + } + + function syncJsonSignals(): void { + if (!store) return; + specSignal.set(store.spec()); + elementStatesSignal.set(store.elementStates()); + + // Determine streaming state from the parser root node status + const spec = store.spec(); + if (spec) { + // Check if the root JSON object is complete by seeing if 
materialize produced a complete object + // We check by looking at the parse tree store's underlying parser root status + // A simpler heuristic: if the spec has both root and elements defined and the last char was }, it's likely complete + // But we can use the parser events approach. Let's check the element states for streaming. + streamingSignal.set(isStillStreaming()); + } else { + streamingSignal.set(true); + } + } + + function isStillStreaming(): boolean { + if (!store) return false; + // If the store has a spec, check if any elements are still streaming + // or if the root object itself hasn't closed yet + const states = store.elementStates(); + for (const state of states.values()) { + if (state.streaming) return true; + } + // Also check if the spec has basic completeness: root + elements + const spec = store.spec(); + if (!spec || !spec.root || !spec.elements) return true; + return false; + } + + function update(content: string): void { + const currentType = typeSignal(); + + if (currentType === 'undetermined') { + const detected = detectType(content); + if (detected === 'undetermined') return; + + typeSignal.set(detected); + + if (detected === 'markdown') { + markdownSignal.set(content); + processedLength = content.length; + } else if (detected === 'json-render') { + streamingSignal.set(true); + // Find where JSON starts (skip whitespace) + jsonStartIndex = 0; + for (let i = 0; i < content.length; i++) { + if (content[i] !== ' ' && content[i] !== '\t' && content[i] !== '\n' && content[i] !== '\r') { + jsonStartIndex = i; + break; + } + } + const jsonContent = content.slice(jsonStartIndex); + initJsonStore(jsonContent); + processedLength = content.length; + } else if (detected === 'a2ui') { + streamingSignal.set(true); + jsonStartIndex = content.indexOf(A2UI_PREFIX) + A2UI_PREFIX.length; + const jsonContent = content.slice(jsonStartIndex); + if (jsonContent.length > 0) { + initJsonStore(jsonContent); + } + processedLength = content.length; + } + return; + 
} + + // Compute delta + const delta = content.slice(processedLength); + processedLength = content.length; + + if (delta.length === 0) return; + + if (currentType === 'markdown' || currentType === 'mixed') { + markdownSignal.set(content); + } else if (currentType === 'json-render' || currentType === 'a2ui') { + if (store) { + store.push(delta); + syncJsonSignals(); + } + } + } + + function dispose(): void { + store = null; + } + + return { + update, + type: typeSignal.asReadonly(), + markdown: markdownSignal.asReadonly(), + spec: specSignal.asReadonly(), + elementStates: elementStatesSignal.asReadonly(), + streaming: streamingSignal.asReadonly(), + dispose, + }; +} From a19acc897b8f7ba7b13d74f274829b596ae5998a Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:13:41 -0700 Subject: [PATCH 12/13] feat(chat): integrate content classifier and generative UI rendering Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- .../compositions/chat/chat.component.spec.ts | 66 +++++++++++++++++++ .../lib/compositions/chat/chat.component.ts | 58 ++++++++++++++-- libs/chat/src/public-api.ts | 6 ++ 3 files changed, 123 insertions(+), 7 deletions(-) diff --git a/libs/chat/src/lib/compositions/chat/chat.component.spec.ts b/libs/chat/src/lib/compositions/chat/chat.component.spec.ts index ff996799f..1216b823b 100644 --- a/libs/chat/src/lib/compositions/chat/chat.component.spec.ts +++ b/libs/chat/src/lib/compositions/chat/chat.component.spec.ts @@ -1,8 +1,10 @@ // SPDX-License-Identifier: PolyForm-Noncommercial-1.0.0 import { describe, it, expect } from 'vitest'; +import { TestBed } from '@angular/core/testing'; import { HumanMessage, AIMessage } from '@langchain/core/messages'; import { ChatComponent } from './chat.component'; import { messageContent } from '../shared/message-utils'; +import { createContentClassifier, type ContentClassifier } from '../../streaming/content-classifier'; describe('ChatComponent', () => { it('is 
defined as a class', () => { @@ -28,3 +30,67 @@ describe('ChatComponent', () => { expect(hasMeta || typeof ChatComponent === 'function').toBe(true); }); }); + +describe('ChatComponent — content classification', () => { + it('classifyMessage creates a classifier on first call and caches it', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const classifiers = new Map<number, ContentClassifier>(); + function classifyMessage(content: string, index: number): ContentClassifier { + let classifier = classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + const c1 = classifyMessage('Hello', 0); + const c2 = classifyMessage('Hello, world', 0); + expect(c2).toBe(c1); + expect(c1.markdown()).toBe('Hello, world'); + }); + }); + + it('different message indices get different classifiers', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const classifiers = new Map<number, ContentClassifier>(); + function classifyMessage(content: string, index: number): ContentClassifier { + let classifier = classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + const c0 = classifyMessage('Hello', 0); + const c1 = classifyMessage('{"root":"r1"}', 1); + expect(c0.type()).toBe('markdown'); + expect(c1.type()).toBe('json-render'); + }); + }); + + it('markdown messages use the fast path (no spec)', () => { + TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const c = createContentClassifier(); + c.update('Just plain markdown text'); + expect(c.type()).toBe('markdown'); + expect(c.spec()).toBeNull(); + expect(c.markdown()).toBe('Just plain markdown text'); + }); + }); + + it('JSON messages produce a spec and no markdown', () => { + 
TestBed.configureTestingModule({}); + TestBed.runInInjectionContext(() => { + const c = createContentClassifier(); + c.update('{"root":"r1","elements":{"r1":{"type":"Text","props":{"label":"Hi"}}}}'); + expect(c.type()).toBe('json-render'); + expect(c.spec()).not.toBeNull(); + expect(c.markdown()).toBe(''); + }); + }); +}); diff --git a/libs/chat/src/lib/compositions/chat/chat.component.ts b/libs/chat/src/lib/compositions/chat/chat.component.ts index dcfc55970..a5323835e 100644 --- a/libs/chat/src/lib/compositions/chat/chat.component.ts +++ b/libs/chat/src/lib/compositions/chat/chat.component.ts @@ -23,6 +23,9 @@ import { ChatTypingIndicatorComponent } from '../../primitives/chat-typing-indic import { ChatErrorComponent } from '../../primitives/chat-error/chat-error.component'; import { ChatInterruptComponent } from '../../primitives/chat-interrupt/chat-interrupt.component'; import { ChatThreadListComponent, Thread } from '../../primitives/chat-thread-list/chat-thread-list.component'; +import { ChatGenerativeUiComponent } from '../../primitives/chat-generative-ui/chat-generative-ui.component'; +import { toRenderRegistry } from '@cacheplane/render'; +import { createContentClassifier, type ContentClassifier } from '../../streaming/content-classifier'; import { messageContent } from '../shared/message-utils'; import { CHAT_THEME_STYLES } from '../../styles/chat-theme'; import { CHAT_MARKDOWN_STYLES, renderMarkdown } from '../../styles/chat-markdown'; @@ -38,6 +41,7 @@ import { CHAT_MARKDOWN_STYLES, renderMarkdown } from '../../styles/chat-markdown ChatErrorComponent, ChatInterruptComponent, ChatThreadListComponent, + ChatGenerativeUiComponent, ], changeDetection: ChangeDetectionStrategy.OnPush, styles: [CHAT_THEME_STYLES, CHAT_MARKDOWN_STYLES], @@ -109,18 +113,33 @@ import { CHAT_MARKDOWN_STYLES, renderMarkdown } from '../../styles/chat-markdown </div> </ng-template> - <!-- AI messages: avatar inline with content (ChatGPT pattern) --> - <ng-template 
chatMessageTemplate="ai" let-message> + <!-- AI messages: classified rendering (markdown + generative UI) --> + <ng-template chatMessageTemplate="ai" let-message let-index="index"> + @let content = messageContent(message); + @let classified = classifyMessage(content, index); <div class="flex gap-3"> <div class="w-7 h-7 flex items-center justify-center text-xs font-semibold shrink-0 mt-0.5" style="background: var(--chat-avatar-bg); color: var(--chat-avatar-text); border-radius: var(--chat-radius-avatar);" >A</div> - <div - class="chat-md flex-1 min-w-0 break-words text-[length:var(--chat-font-size)] leading-[var(--chat-line-height)]" - style="color: var(--chat-text);" - [innerHTML]="renderMd(messageContent(message))" - ></div> + <div class="flex-1 min-w-0 flex flex-col gap-2"> + @if (classified.markdown(); as md) { + <div + class="chat-md break-words text-[length:var(--chat-font-size)] leading-[var(--chat-line-height)]" + style="color: var(--chat-text);" + [innerHTML]="renderMd(md)" + ></div> + } + + @if (classified.spec(); as spec) { + <chat-generative-ui + [spec]="spec" + [registry]="renderRegistry()" + [store]="store()" + [loading]="ref().isLoading()" + /> + } + </div> </div> </ng-template> @@ -186,6 +205,14 @@ export class ChatComponent { readonly sidebarOpen = signal(false); + private readonly classifiers = new Map<number, ContentClassifier>(); + + /** Convert ViewRegistry → AngularRegistry for ChatGenerativeUiComponent. */ + readonly renderRegistry = computed(() => { + const v = this.views(); + return v ? 
toRenderRegistry(v) : undefined; + }); + readonly messageContent = messageContent; private readonly scrollContainer = viewChild<ElementRef<HTMLElement>>('scrollContainer'); @@ -217,6 +244,23 @@ export class ChatComponent { }); } + classifyMessage(content: string, index: number): ContentClassifier { + let classifier = this.classifiers.get(index); + if (!classifier) { + classifier = createContentClassifier(); + this.classifiers.set(index, classifier); + } + classifier.update(content); + return classifier; + } + + clearClassifiers(): void { + for (const [, c] of this.classifiers) { + c.dispose(); + } + this.classifiers.clear(); + } + renderMd(content: string) { return renderMarkdown(content, this.sanitizer); } diff --git a/libs/chat/src/public-api.ts b/libs/chat/src/public-api.ts index 2abb86b00..02474efa9 100644 --- a/libs/chat/src/public-api.ts +++ b/libs/chat/src/public-api.ts @@ -56,5 +56,11 @@ export { views, withViews, withoutViews, toRenderRegistry } from '@cacheplane/re export type { ViewRegistry } from '@cacheplane/render'; export { provideViews, VIEW_REGISTRY } from '@cacheplane/render'; +// Streaming / Generative UI +export { createContentClassifier } from './lib/streaming/content-classifier'; +export type { ContentClassifier, ContentType } from './lib/streaming/content-classifier'; +export { createParseTreeStore } from './lib/streaming/parse-tree-store'; +export type { ParseTreeStore, ElementAccumulationState } from './lib/streaming/parse-tree-store'; + // Test utilities export { createMockAgentRef } from './lib/testing/mock-agent-ref'; From ecfe7a7d20a05d16f16fd6cfa46b46b5c29a0c02 Mon Sep 17 00:00:00 2001 From: Brian Love <brian@liveloveapp.com> Date: Wed, 8 Apr 2026 13:15:21 -0700 Subject: [PATCH 13/13] fix(chat): add @cacheplane/partial-json peer dependency Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --- libs/chat/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/chat/package.json b/libs/chat/package.json index 
01c36cc47..df1ebd7fc 100644 --- a/libs/chat/package.json +++ b/libs/chat/package.json @@ -6,6 +6,7 @@ "@angular/common": "^20.0.0 || ^21.0.0", "@angular/forms": "^20.0.0 || ^21.0.0", "@cacheplane/render": "^0.0.1", + "@cacheplane/partial-json": "^0.0.1", "@cacheplane/angular": "^0.0.1", "@json-render/core": "^0.16.0", "@langchain/core": "^1.1.33",