From 4e4bae367291f1f0bd443ebd597a55ed47f3e967 Mon Sep 17 00:00:00 2001 From: Wojtek Majewski Date: Thu, 27 Nov 2025 13:43:48 +0100 Subject: [PATCH] update docs for auto-compilation flow and remove flows.ts --- .../__tests__/commands/compile/index.test.ts | 6 +- .../install/create-edge-function.test.ts | 28 ++---- pkgs/cli/src/commands/compile/index.ts | 9 +- .../commands/install/create-edge-function.ts | 23 +---- pkgs/cli/supabase/functions/pgflow/flows.ts | 5 - pkgs/cli/supabase/functions/pgflow/index.ts | 4 +- pkgs/edge-worker/src/control-plane/index.ts | 4 +- pkgs/edge-worker/src/control-plane/server.ts | 21 +--- pkgs/website/astro.config.mjs | 8 ++ pkgs/website/src/content/docs/build/index.mdx | 5 + .../src/content/docs/concepts/compilation.mdx | 88 +++++++++++++++++ .../src/content/docs/concepts/index.mdx | 5 + .../background-jobs/create-worker.mdx | 9 +- .../docs/get-started/flows/compile-flow.mdx | 95 +++++++++++-------- .../docs/get-started/flows/create-flow.mdx | 22 ++++- .../docs/get-started/flows/run-flow.mdx | 4 + .../content/docs/get-started/installation.mdx | 6 +- ...ntrol-plane-and-http-based-compilation.mdx | 45 +++++++++ .../docs/reference/control-plane-api.mdx | 94 ++++++++++++++++++ .../src/content/docs/reference/index.mdx | 5 + .../docs/tutorials/ai-web-scraper/backend.mdx | 7 +- 21 files changed, 366 insertions(+), 127 deletions(-) delete mode 100644 pkgs/cli/supabase/functions/pgflow/flows.ts create mode 100644 pkgs/website/src/content/docs/concepts/compilation.mdx create mode 100644 pkgs/website/src/content/docs/news/pgflow-0-9-0-control-plane-and-http-based-compilation.mdx create mode 100644 pkgs/website/src/content/docs/reference/control-plane-api.mdx diff --git a/pkgs/cli/__tests__/commands/compile/index.test.ts b/pkgs/cli/__tests__/commands/compile/index.test.ts index f77fda0a4..53bdfad0c 100644 --- a/pkgs/cli/__tests__/commands/compile/index.test.ts +++ b/pkgs/cli/__tests__/commands/compile/index.test.ts @@ -72,7 +72,7 @@ describe('fetchFlowSQL', () => { status: 404, json: async () => ({ error: 'Flow Not Found', - message: "Flow 'unknown_flow' not found. Did you add it to flows.ts?", + message: "Flow 'unknown_flow' not found. 
Did you add it to supabase/functions/pgflow/index.ts?", }), }; @@ -83,7 +83,7 @@ describe('fetchFlowSQL', () => { ).rejects.toThrow("Flow 'unknown_flow' not found"); await expect( fetchFlowSQL('unknown_flow', 'http://127.0.0.1:50621/functions/v1/pgflow', 'test-publishable-key') - ).rejects.toThrow('Add your flow to supabase/functions/pgflow/flows.ts'); + ).rejects.toThrow('Add your flow to supabase/functions/pgflow/index.ts'); }); it('should handle ECONNREFUSED with startup instructions', async () => { @@ -167,7 +167,7 @@ describe('fetchFlowSQL', () => { ).rejects.toThrow("Flow 'unknown_flow' not found"); await expect( fetchFlowSQL('unknown_flow', 'http://127.0.0.1:50621/functions/v1/pgflow', 'test-publishable-key') - ).rejects.toThrow('Did you add it to flows.ts'); + ).rejects.toThrow('Did you add it to supabase/functions/pgflow/index.ts'); }); it('should construct correct URL with flow slug', async () => { diff --git a/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts b/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts index 618152557..5f6d3fa17 100644 --- a/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts +++ b/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts @@ -22,7 +22,7 @@ describe('createEdgeFunction', () => { fs.rmSync(tempDir, { recursive: true, force: true }); }); - it('should create all three files when none exist', async () => { + it('should create both files when none exist', async () => { const result = await createEdgeFunction({ supabasePath, autoConfirm: true, @@ -36,23 +36,16 @@ describe('createEdgeFunction', () => { // Verify all files exist const indexPath = path.join(pgflowFunctionDir, 'index.ts'); - const flowsPath = path.join(pgflowFunctionDir, 'flows.ts'); const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json'); expect(fs.existsSync(indexPath)).toBe(true); - expect(fs.existsSync(flowsPath)).toBe(true); expect(fs.existsSync(denoJsonPath)).toBe(true); - // Verify index.ts content + // Verify index.ts content (inline flow registration, no flows.ts) const indexContent = fs.readFileSync(indexPath, 'utf8'); expect(indexContent).toContain("import { ControlPlane } from '@pgflow/edge-worker'"); - expect(indexContent).toContain("import { flows } from './flows.ts'"); - expect(indexContent).toContain('ControlPlane.serve(flows)'); - - // Verify flows.ts content - const flowsContent = fs.readFileSync(flowsPath, 'utf8'); - expect(flowsContent).toContain('export const flows = ['); - expect(flowsContent).toContain('// Import your flows here'); + expect(indexContent).toContain('ControlPlane.serve(['); + expect(indexContent).toContain('// Import your flows here'); // Verify deno.json content const denoJsonContent = fs.readFileSync(denoJsonPath, 'utf8'); @@ -66,11 +59,9 @@ describe('createEdgeFunction', () => { fs.mkdirSync(pgflowFunctionDir, { recursive: true }); const indexPath = path.join(pgflowFunctionDir, 'index.ts'); - const flowsPath = path.join(pgflowFunctionDir, 'flows.ts'); const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json'); fs.writeFileSync(indexPath, '// existing content'); - fs.writeFileSync(flowsPath, '// existing content'); fs.writeFileSync(denoJsonPath, '// existing content'); const result = await createEdgeFunction({ @@ -83,7 +74,6 @@ describe('createEdgeFunction', () => { // Verify files still exist with original content expect(fs.readFileSync(indexPath, 'utf8')).toBe('// existing content'); - expect(fs.readFileSync(flowsPath, 'utf8')).toBe('// existing content'); 
expect(fs.readFileSync(denoJsonPath, 'utf8')).toBe('// existing content'); }); @@ -92,7 +82,6 @@ describe('createEdgeFunction', () => { fs.mkdirSync(pgflowFunctionDir, { recursive: true }); const indexPath = path.join(pgflowFunctionDir, 'index.ts'); - const flowsPath = path.join(pgflowFunctionDir, 'flows.ts'); const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json'); // Only create index.ts @@ -103,19 +92,15 @@ describe('createEdgeFunction', () => { autoConfirm: true, }); - // Should return true because some files were created + // Should return true because deno.json was created expect(result).toBe(true); // Verify index.ts was not modified expect(fs.readFileSync(indexPath, 'utf8')).toBe('// existing content'); - // Verify flows.ts and deno.json were created - expect(fs.existsSync(flowsPath)).toBe(true); + // Verify deno.json was created expect(fs.existsSync(denoJsonPath)).toBe(true); - const flowsContent = fs.readFileSync(flowsPath, 'utf8'); - expect(flowsContent).toContain('export const flows = ['); - const denoJsonContent = fs.readFileSync(denoJsonPath, 'utf8'); expect(denoJsonContent).toContain('"imports"'); }); @@ -138,7 +123,6 @@ describe('createEdgeFunction', () => { // Verify files exist expect(fs.existsSync(path.join(pgflowFunctionDir, 'index.ts'))).toBe(true); - expect(fs.existsSync(path.join(pgflowFunctionDir, 'flows.ts'))).toBe(true); expect(fs.existsSync(path.join(pgflowFunctionDir, 'deno.json'))).toBe(true); }); diff --git a/pkgs/cli/src/commands/compile/index.ts b/pkgs/cli/src/commands/compile/index.ts index 91c641b23..72cb5f97f 100644 --- a/pkgs/cli/src/commands/compile/index.ts +++ b/pkgs/cli/src/commands/compile/index.ts @@ -30,9 +30,9 @@ export async function fetchFlowSQL( const errorData = await response.json(); throw new Error( `Flow '${flowSlug}' not found.\n\n` + - `${errorData.message || 'Did you add it to flows.ts?'}\n\n` + + `${errorData.message || 'Did you add it to supabase/functions/pgflow/index.ts?'}\n\n` + `Fix:\n` + - `1. Add your flow to supabase/functions/pgflow/flows.ts\n` + + `1. Add your flow to supabase/functions/pgflow/index.ts\n` + `2. 
Restart edge functions: supabase functions serve` ); } @@ -45,11 +45,6 @@ export async function fetchFlowSQL( return await response.json(); } catch (error) { if (error instanceof Error) { - // Debug: show actual error and URL - console.error(`[DEBUG] Fetch failed for URL: ${url}`); - console.error(`[DEBUG] Error message: ${error.message}`); - console.error(`[DEBUG] Error cause:`, (error as any).cause); - // Check for connection refused errors if ( error.message.includes('ECONNREFUSED') || diff --git a/pkgs/cli/src/commands/install/create-edge-function.ts b/pkgs/cli/src/commands/install/create-edge-function.ts index 1ad7bec09..7fe3d343f 100644 --- a/pkgs/cli/src/commands/install/create-edge-function.ts +++ b/pkgs/cli/src/commands/install/create-edge-function.ts @@ -5,18 +5,13 @@ import chalk from 'chalk'; import { getVersion } from '../../utils/get-version.js'; const INDEX_TS_TEMPLATE = `import { ControlPlane } from '@pgflow/edge-worker'; -import { flows } from './flows.ts'; - -ControlPlane.serve(flows); -`; - -const FLOWS_TS_TEMPLATE = `// Import your flows here +// Import your flows here: // import { MyFlow } from '../_flows/my_flow.ts'; -// Export flows array for ControlPlane -export const flows = [ +ControlPlane.serve([ + // Add your flows here: // MyFlow, -]; +]); `; const DENO_JSON_TEMPLATE = (version: string) => `{ @@ -44,7 +39,6 @@ export async function createEdgeFunction({ const pgflowFunctionDir = path.join(functionsDir, 'pgflow'); const indexPath = path.join(pgflowFunctionDir, 'index.ts'); - const flowsPath = path.join(pgflowFunctionDir, 'flows.ts'); const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json'); // Check what needs to be created @@ -54,10 +48,6 @@ export async function createEdgeFunction({ filesToCreate.push({ path: indexPath, name: 'index.ts' }); } - if (!fs.existsSync(flowsPath)) { - filesToCreate.push({ path: flowsPath, name: 'flows.ts' }); - } - if (!fs.existsSync(denoJsonPath)) { filesToCreate.push({ path: denoJsonPath, name: 'deno.json' }); } @@ -69,7 +59,6 @@ export async function createEdgeFunction({ const detailedMsg = [ 'Existing files:', ` ${chalk.dim('•')} ${chalk.bold('supabase/functions/pgflow/index.ts')}`, - ` ${chalk.dim('•')} ${chalk.bold('supabase/functions/pgflow/flows.ts')}`, ` ${chalk.dim('•')} ${chalk.bold('supabase/functions/pgflow/deno.json')}`, ].join('\n'); @@ -113,10 +102,6 @@ export async function createEdgeFunction({ fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE); } - if (filesToCreate.some((f) => f.path === flowsPath)) { - fs.writeFileSync(flowsPath, FLOWS_TS_TEMPLATE); - } - if (filesToCreate.some((f) => f.path === denoJsonPath)) { fs.writeFileSync(denoJsonPath, DENO_JSON_TEMPLATE(getVersion())); } diff --git a/pkgs/cli/supabase/functions/pgflow/flows.ts b/pkgs/cli/supabase/functions/pgflow/flows.ts deleted file mode 100644 index b70e978c5..000000000 --- a/pkgs/cli/supabase/functions/pgflow/flows.ts +++ /dev/null @@ -1,5 +0,0 @@ -// Import your flows here -import { TestFlowE2E } from '../_flows/test_flow_e2e.ts'; - -// Export flows array for ControlPlane -export const flows = [TestFlowE2E]; diff --git a/pkgs/cli/supabase/functions/pgflow/index.ts b/pkgs/cli/supabase/functions/pgflow/index.ts index 5234ae0d9..d2c3d840a 100644 --- a/pkgs/cli/supabase/functions/pgflow/index.ts +++ b/pkgs/cli/supabase/functions/pgflow/index.ts @@ -1,4 +1,4 @@ import { ControlPlane } from '@pgflow/edge-worker'; -import { flows } from './flows.ts'; +import { TestFlowE2E } from '../_flows/test_flow_e2e.ts'; -ControlPlane.serve(flows); 
+ControlPlane.serve([TestFlowE2E]); diff --git a/pkgs/edge-worker/src/control-plane/index.ts b/pkgs/edge-worker/src/control-plane/index.ts index 0f1107dca..54914fc8f 100644 --- a/pkgs/edge-worker/src/control-plane/index.ts +++ b/pkgs/edge-worker/src/control-plane/index.ts @@ -7,9 +7,9 @@ * @example * ```typescript * import { ControlPlane } from '@pgflow/edge-worker'; - * import { flows } from './flows.ts'; + * import { MyFlow } from '../_flows/my_flow.ts'; * - * ControlPlane.serve(flows); + * ControlPlane.serve([MyFlow]); * ``` */ diff --git a/pkgs/edge-worker/src/control-plane/server.ts b/pkgs/edge-worker/src/control-plane/server.ts index 417e18448..2dd52ec24 100644 --- a/pkgs/edge-worker/src/control-plane/server.ts +++ b/pkgs/edge-worker/src/control-plane/server.ts @@ -85,10 +85,7 @@ export function serveControlPlane(flows: AnyFlow[]): void { /** * Handles GET /flows/:slug requests */ -function handleGetFlow( - registry: Map, - slug: string -): Response { +function handleGetFlow(registry: Map, slug: string): Response { try { const flow = registry.get(slug); @@ -96,7 +93,7 @@ function handleGetFlow( return jsonResponse( { error: 'Flow Not Found', - message: `Flow '${slug}' not found. Did you add it to flows.ts?`, + message: `Flow '${slug}' not found. Did you add it to supabase/functions/pgflow/index.ts?`, }, 404 ); @@ -134,17 +131,3 @@ function jsonResponse(data: unknown, status: number): Response { }, }); } - -/** - * ControlPlane class for serving flow compilation HTTP API - */ -export class ControlPlane { - /** - * Serves the ControlPlane HTTP API for flow compilation - * @param flows Array of flow definitions to register - */ - static serve(flows: AnyFlow[]): void { - const handler = createControlPlaneHandler(flows); - Deno.serve({}, handler); - } -} diff --git a/pkgs/website/astro.config.mjs b/pkgs/website/astro.config.mjs index 3564aad56..f6c7bb756 100644 --- a/pkgs/website/astro.config.mjs +++ b/pkgs/website/astro.config.mjs @@ -321,6 +321,10 @@ export default defineConfig({ link: '/concepts/three-layer-architecture/', }, { label: 'Data model', link: '/concepts/data-model/' }, + { + label: 'Compilation', + link: '/concepts/compilation/', + }, ], }, { @@ -366,6 +370,10 @@ export default defineConfig({ }, { label: 'Context API', link: '/reference/context/' }, { label: 'Compile API', link: '/reference/compile-api/' }, + { + label: 'ControlPlane API', + link: '/reference/control-plane-api/', + }, { label: 'Manual installation', link: '/reference/manual-installation/', diff --git a/pkgs/website/src/content/docs/build/index.mdx b/pkgs/website/src/content/docs/build/index.mdx index ae67a22c8..4875aead5 100644 --- a/pkgs/website/src/content/docs/build/index.mdx +++ b/pkgs/website/src/content/docs/build/index.mdx @@ -35,6 +35,11 @@ Now that you've created your first flow, learn how to structure your code, integ href="/build/process-arrays-in-parallel/" description="Process arrays of data in parallel using map steps" /> + ## Starting Flows diff --git a/pkgs/website/src/content/docs/concepts/compilation.mdx b/pkgs/website/src/content/docs/concepts/compilation.mdx new file mode 100644 index 000000000..2b533083d --- /dev/null +++ b/pkgs/website/src/content/docs/concepts/compilation.mdx @@ -0,0 +1,88 @@ +--- +title: Compilation +description: How pgflow compiles TypeScript flows to SQL via HTTP +sidebar: + order: 25 +--- + +import { Aside } from "@astrojs/starlight/components"; + +pgflow compiles TypeScript flow definitions to SQL migrations via an HTTP-based architecture. 
This design eliminates the need for a local Deno installation and ensures compilation uses the same runtime as production. + +## How It Works + +When you run `pgflow compile greet_user`, the following happens: + +``` +┌─────────────┐ HTTP GET ┌─────────────────────┐ +│ pgflow CLI │ ─────────────────>│ ControlPlane Edge │ +│ │ │ Function │ +│ │ │ │ +│ │ SQL Array │ 1. Look up flow │ +│ │ <─────────────────│ 2. Call compileFlow│ +│ │ │ 3. Return SQL │ +│ │ └─────────────────────┘ +│ 4. Write │ +│ migration │ +└─────────────┘ +``` + +1. **CLI sends request** - The compile command sends an HTTP GET request to: + `http://127.0.0.1:54321/functions/v1/pgflow/flows/{slug}` + +2. **ControlPlane looks up flow** - The edge function has a registry of flows (from your `index.ts`). It finds the flow by slug. + +3. **Compilation happens in Deno** - The ControlPlane calls `compileFlow()` from `@pgflow/dsl`, which extracts the flow structure and generates SQL. + +4. **SQL returned to CLI** - The response contains an array of SQL statements. + +5. **CLI writes migration** - The CLI joins the SQL and writes it to `supabase/migrations/{timestamp}_create_{slug}_flow.sql`. + +## The ControlPlane Edge Function + +The `pgflow` edge function is created during installation and serves as your project's ControlPlane: + +```typescript title="supabase/functions/pgflow/index.ts" +import { ControlPlane } from '@pgflow/edge-worker'; +import GreetUser from '../_flows/greet_user.ts'; +import ProcessOrder from '../_flows/process_order.ts'; + +ControlPlane.serve([ + GreetUser, + ProcessOrder, +]); +``` + +The ControlPlane: +- **Registers flows** by slug in an in-memory registry +- **Exposes** the `/flows/:slug` endpoint for compilation +- **Returns 404** if a flow slug is not found in the registry +- **Returns 500** with error details if compilation fails + +## Why HTTP-Based Compilation? + +This architecture provides several benefits: + +**No local Deno required** - Users don't need Deno installed on their machine. The Supabase Edge Functions runtime handles everything. + +**Same runtime as production** - Flows are compiled using the exact same Deno environment they'll run in, eliminating "works on my machine" issues. + +**Consistent dependency resolution** - The `deno.json` import map in your edge function ensures consistent package versions. + +**Simpler CLI** - The CLI is a lightweight Node.js package that makes HTTP requests, rather than needing to bundle the entire compilation infrastructure. + +## Adding New Flows + +To make a flow available for compilation: + +1. Create the flow definition in `_flows/` +2. Import it in `supabase/functions/pgflow/index.ts` +3. Add it to the `ControlPlane.serve([...])` array + + + +## API Reference + +For detailed HTTP endpoint documentation, see [ControlPlane API Reference](/reference/control-plane-api/). 
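+
+As a quick sanity check during local development, you can call the compile endpoint yourself. This is a sketch that assumes the default local API port (`54321`) and the anon key reported by `supabase status`:
+
+```bash frame="none"
+# Fetch the compiled SQL for greet_user from the local ControlPlane
+curl -H "Authorization: Bearer $SUPABASE_ANON_KEY" \
+  http://127.0.0.1:54321/functions/v1/pgflow/flows/greet_user
+```
+
+The response contains the flow slug and an array of SQL statements - the same payload the CLI turns into a migration file.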
diff --git a/pkgs/website/src/content/docs/concepts/index.mdx b/pkgs/website/src/content/docs/concepts/index.mdx index 9ef11f6ab..d20ef175c 100644 --- a/pkgs/website/src/content/docs/concepts/index.mdx +++ b/pkgs/website/src/content/docs/concepts/index.mdx @@ -29,6 +29,11 @@ Steps execute through **tasks** (the actual units of work) - regular steps have href="/concepts/data-model/" description="Understanding pgflow's database schema design and table relationships" /> + ## Defining Flows diff --git a/pkgs/website/src/content/docs/get-started/background-jobs/create-worker.mdx b/pkgs/website/src/content/docs/get-started/background-jobs/create-worker.mdx index a2a9a523f..254506bdf 100644 --- a/pkgs/website/src/content/docs/get-started/background-jobs/create-worker.mdx +++ b/pkgs/website/src/content/docs/get-started/background-jobs/create-worker.mdx @@ -54,11 +54,6 @@ Before starting, please read the [Installation](/get-started/installation/) guid This makes Supabase listen for incoming HTTP requests, but does not start your worker yet. - :::note[Restarting Edge Runtime] - You must stop and start the Edge Runtime every time you make changes - to your workers because of the `per_worker` policy. - ::: - 1. ### Start your worker Start the worker by sending an HTTP request to your new Edge Function @@ -74,6 +69,10 @@ Before starting, please read the [Installation](/get-started/installation/) guid [Info] worker_id= [WorkerLifecycle] Ensuring queue 'tasks' exists... ``` + :::tip[Seamless local development] + Tired of curling after every code change? Add the [watchdog cron](/deploy/supabase/keep-workers-running/) to your `supabase/seed.sql` (use `'2 seconds'` interval for fast local iteration) - it auto-restarts workers when they stop polling. + ::: + 4. ### Process your first message Your worker is now polling for messages on the `tasks` queue (which was automatically created during startup). diff --git a/pkgs/website/src/content/docs/get-started/flows/compile-flow.mdx b/pkgs/website/src/content/docs/get-started/flows/compile-flow.mdx index 4ab156783..c23c62aff 100644 --- a/pkgs/website/src/content/docs/get-started/flows/compile-flow.mdx +++ b/pkgs/website/src/content/docs/get-started/flows/compile-flow.mdx @@ -7,67 +7,56 @@ sidebar: import { Aside, Steps } from "@astrojs/starlight/components"; -Now that we've defined our flow, we need to register it in the database. pgflow provides a CLI tool that compiles your TypeScript flow definition into SQL migrations that can be applied to your Supabase database. +Now that we've defined our flow and registered it, we need to compile it to SQL and apply it to your database. -## What is compilation? - -pgflow compiles your TypeScript flow definition into SQL migrations that register the flow in your database. - -1. Analyze TypeScript flow definition -2. Extract steps, dependencies, and options -3. Generate SQL registration commands -4. Create migration file +1. ### Start edge functions - + Open a terminal and start the edge functions server: -The runtime executes flows based on their database representation, not the TypeScript code directly. + ```bash frame="none" + npx supabase functions serve + ``` - + -1. ### Compile the flow to SQL +2. 
### Compile the flow - Run the pgflow compile command, pointing to your flow definition file: + In a **separate terminal**, run the compile command with the flow slug: ```bash frame="none" - npx pgflow@latest compile supabase/functions/_flows/greet_user.ts + npx pgflow@latest compile greet_user ``` - This will: - - Parse your TypeScript flow definition - - Extract the flow structure, step dependencies, and configuration - - Generate SQL commands to register the flow in the database - - Create a timestamped migration file in your Supabase migrations folder + You should see output like this: ``` - ✓ Successfully compiled flow to SQL - ✓ Migration file created: supabase/migrations/20250505120000_create_greet_user_flow.sql + Successfully compiled flow to SQL + Migration file created: supabase/migrations/20250505120000_create_greet_user_flow.sql ``` -2. ### Examine the generated SQL +3. ### Examine the generated SQL (optional) - Let's look at what got generated. Open the migration file in your editor: + Let's look at what got generated: ```bash frame="none" cat supabase/migrations/*_create_greet_user_flow.sql ``` - The migration file contains SQL commands that: - - 1. Create the flow definition - 2. Add each step with its configuration - 3. Define dependencies between steps - The generated SQL looks like this: ```sql @@ -76,23 +65,19 @@ The runtime executes flows based on their database representation, not the TypeS SELECT pgflow.add_step('greet_user', 'greeting', ARRAY['full_name']); ``` - This SQL representation is what the pgflow runtime system uses to execute your workflow. - :::tip[Step Configuration] This is a simple example showing basic step registration. Steps can also be configured with retry behavior, timeouts, and delays. See [Step Execution Options](/reference/configuration/step-execution/) for all available configuration options. ::: -3. ### Apply the migration +4. ### Apply the migration - Now that we have the SQL migration, we need to apply it to our database: + Apply the migration to register the flow in your database: ```bash frame="none" npx supabase migrations up ``` - This will execute the SQL migration and register your flow definition in the database. - - If successful, you should see output like this: + You should see: ``` Applying migration 20250505120000_create_greet_user_flow.sql...done @@ -100,6 +85,40 @@ The runtime executes flows based on their database representation, not the TypeS +:::note[How compilation works] +The compile command sends an HTTP request to your local `pgflow` edge function, which compiles the TypeScript and returns SQL. This approach uses the same Deno runtime as production, ensuring consistency. + +
+Learn more + +When you run `pgflow compile greet_user`: + +1. The CLI sends a request to `http://127.0.0.1:54321/functions/v1/pgflow/flows/greet_user` +2. The ControlPlane edge function looks up the flow in its registry +3. It compiles the flow to SQL using `@pgflow/dsl` +4. The CLI receives the SQL and writes it to a migration file + +See [Compilation](/concepts/compilation/) for more details. +
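+
+For the `greet_user` flow, the ControlPlane returns a JSON payload along these lines (the response format is documented in the [ControlPlane API reference](/reference/control-plane-api/); the exact statements depend on your flow definition):
+
+```json
+{
+  "flowSlug": "greet_user",
+  "sql": [
+    "SELECT pgflow.create_flow('greet_user');",
+    "SELECT pgflow.add_step('greet_user', 'full_name');",
+    "SELECT pgflow.add_step('greet_user', 'greeting', ARRAY['full_name']);"
+  ]
+}
+```
+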
+::: + +## Troubleshooting + +### "Could not connect to ControlPlane" + +Make sure edge functions are running: + +```bash frame="none" +npx supabase functions serve +``` + +### "Flow not found" + +The flow is not registered in `index.ts`. Make sure you: + +1. Imported your flow in `supabase/functions/pgflow/index.ts` +2. Added it to the `ControlPlane.serve([...])` array + :::note[Flow definitions are immutable] Once a flow is registered in the database, its structure cannot be modified. To change a flow, you can either [delete it](/build/delete-flows/) (development only) or use [versioning](/build/version-flows/). diff --git a/pkgs/website/src/content/docs/get-started/flows/create-flow.mdx b/pkgs/website/src/content/docs/get-started/flows/create-flow.mdx index cda6c68e8..4d3d85284 100644 --- a/pkgs/website/src/content/docs/get-started/flows/create-flow.mdx +++ b/pkgs/website/src/content/docs/get-started/flows/create-flow.mdx @@ -37,8 +37,11 @@ Our flow will: - supabase - functions + - pgflow + - index.ts (register flows here) + - deno.json - _flows - - greet_user.ts + - greet_user.ts (your flow definition) 2. ### Create the flow definition @@ -66,6 +69,23 @@ Our flow will: ); ``` +3. ### Register the flow + + Open `supabase/functions/pgflow/index.ts` and replace the commented example with your flow: + + ```diff lang="typescript" title="supabase/functions/pgflow/index.ts" + import { ControlPlane } from '@pgflow/edge-worker'; + -// Import your flows here: + -// import { MyFlow } from '../_flows/my_flow.ts'; + +import GreetUser from '../_flows/greet_user.ts'; + + ControlPlane.serve([ + - // Add your flows here: + - // MyFlow, + + GreetUser, + ]); + ``` + :::note[Key Concepts] diff --git a/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx b/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx index bbfc76629..2215b01bd 100644 --- a/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx +++ b/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx @@ -97,6 +97,10 @@ Before starting, make sure you have completed: If you have troubles starting the worker, make sure you disabled JWT verification in the previous step. + :::tip[Seamless local development] + Tired of curling after every code change? Add the [watchdog cron](/deploy/supabase/keep-workers-running/) to your `supabase/seed.sql` (use `'2 seconds'` interval for fast local iteration) - it auto-restarts workers when they stop polling. + ::: + 5. ### Trigger your first flow Now let's start a flow run! Using Supabase Studio: diff --git a/pkgs/website/src/content/docs/get-started/installation.mdx b/pkgs/website/src/content/docs/get-started/installation.mdx index 223fb6f00..7889afaff 100644 --- a/pkgs/website/src/content/docs/get-started/installation.mdx +++ b/pkgs/website/src/content/docs/get-started/installation.mdx @@ -15,8 +15,6 @@ Let's set up pgflow in your Supabase project. This setup needs to be done only o @@ -31,6 +29,10 @@ If you haven't installed the CLI yet or need to upgrade, see Supabase's [install npx pgflow@latest install ``` + + Want to understand what's happening? See the [manual installation guide](/reference/manual-installation/). 2. 
### Apply configuration changes diff --git a/pkgs/website/src/content/docs/news/pgflow-0-9-0-control-plane-and-http-based-compilation.mdx b/pkgs/website/src/content/docs/news/pgflow-0-9-0-control-plane-and-http-based-compilation.mdx new file mode 100644 index 000000000..c34534211 --- /dev/null +++ b/pkgs/website/src/content/docs/news/pgflow-0-9-0-control-plane-and-http-based-compilation.mdx @@ -0,0 +1,45 @@ +--- +draft: false +title: 'pgflow 0.9.0: Control Plane and HTTP-Based Compilation' +description: 'Simpler flow compilation via ControlPlane edge function - no local Deno required' +date: 2025-11-27 +authors: + - jumski +tags: + - release + - control-plane + - compilation +featured: true +--- + +import { Aside } from "@astrojs/starlight/components"; + +pgflow 0.9.0 introduces the ControlPlane edge function, enabling HTTP-based flow compilation without requiring a local Deno installation. + +## What Changed + +The CLI no longer spawns Deno processes locally. Instead, compilation requests go through the ControlPlane edge function: + +``` +pgflow compile my_flow --> ControlPlane --> SQL migration +``` + +This simplifies the development setup and lays groundwork for future auto-compilation features where workers will verify and compile flows at startup without any CLI involvement. + + + +## Upgrading + +Update your packages following the [Update pgflow guide](/deploy/update-pgflow/), then run: + +```bash frame="none" +npx pgflow@0.9.0 install +``` + +This creates the ControlPlane edge function at `supabase/functions/control-plane/`. + + diff --git a/pkgs/website/src/content/docs/reference/control-plane-api.mdx b/pkgs/website/src/content/docs/reference/control-plane-api.mdx new file mode 100644 index 000000000..35cece971 --- /dev/null +++ b/pkgs/website/src/content/docs/reference/control-plane-api.mdx @@ -0,0 +1,94 @@ +--- +title: ControlPlane API +description: HTTP endpoints exposed by the ControlPlane edge function for flow compilation +sidebar: + order: 106 +--- + +import { Aside } from "@astrojs/starlight/components"; + +The ControlPlane edge function exposes HTTP endpoints for compiling TypeScript flows to SQL. The `pgflow compile` CLI uses these endpoints internally. + + + +## Base URL + +**Local development:** +``` +http://127.0.0.1:54321/functions/v1/pgflow +``` + +**Production (Supabase.com):** +``` +https://.supabase.co/functions/v1/pgflow +``` + +## Endpoints + +### GET /flows/:slug + +Compiles a flow definition to SQL statements. + +**Request:** +``` +GET /functions/v1/pgflow/flows/greet_user +Authorization: Bearer +``` + +**Success Response (200):** +```json +{ + "flowSlug": "greet_user", + "sql": [ + "SELECT pgflow.create_flow('greet_user');", + "SELECT pgflow.add_step('greet_user', 'full_name', 'single', 0, NULL);", + "SELECT pgflow.add_step('greet_user', 'greeting', 'single', 0, ARRAY['full_name']);" + ] +} +``` + +**Flow Not Found (404):** +```json +{ + "error": "Flow Not Found", + "message": "Flow 'unknown' not found. Did you add it to supabase/functions/pgflow/index.ts?" 
+} +``` + +**Compilation Error (500):** +```json +{ + "error": "Compilation Error", + "message": "Detailed error message from compileFlow()" +} +``` + +## Response Fields + +| Field | Type | Description | +|-------|------|-------------| +| `flowSlug` | `string` | The slug identifier of the compiled flow | +| `sql` | `string[]` | Array of SQL statements to execute in order | +| `error` | `string` | Error type (only present on error responses) | +| `message` | `string` | Human-readable error description (only present on error responses) | + +## Authentication + +The ControlPlane requires a valid Supabase anon key in the `Authorization` header. For local development, this is your project's local anon key from `supabase status`. + +## Usage with CLI + +The `pgflow compile` command handles authentication and request formatting automatically: + +```bash frame="none" +npx pgflow@latest compile greet_user +``` + +For custom tooling or debugging, you can call the endpoint directly: + +```bash frame="none" +curl -H "Authorization: Bearer $SUPABASE_ANON_KEY" \ + http://127.0.0.1:54321/functions/v1/pgflow/flows/greet_user +``` diff --git a/pkgs/website/src/content/docs/reference/index.mdx b/pkgs/website/src/content/docs/reference/index.mdx index 7ec2edde9..9e5a14081 100644 --- a/pkgs/website/src/content/docs/reference/index.mdx +++ b/pkgs/website/src/content/docs/reference/index.mdx @@ -55,6 +55,11 @@ Find precise technical information about pgflow's APIs, configuration options, a href="/reference/compile-api/" description="Generate SQL statements from Flow definitions programmatically" /> +