diff --git a/ARCHITECTURE_GUIDE.md b/ARCHITECTURE_GUIDE.md
index 80de276c8..b2575ba05 100644
--- a/ARCHITECTURE_GUIDE.md
+++ b/ARCHITECTURE_GUIDE.md
@@ -221,7 +221,7 @@ npx pgflow compile path/to/flow.ts
```typescript
import { createFlowWorker } from '@pgflow/edge-worker';
import { createClient } from '@supabase/supabase-js';
-import MyFlow from './_flows/my_flow.ts';
+import { MyFlow } from '../../flows/my_flow.ts';
// Create Supabase client
const supabase = createClient(
diff --git a/package.json b/package.json
index 7be32723d..3aef77b4a 100644
--- a/package.json
+++ b/package.json
@@ -44,7 +44,6 @@
"netlify-cli": "^22.1.3",
"nx": "21.2.1",
"prettier": "^2.6.2",
- "supabase": "^2.34.3",
"tslib": "^2.3.0",
"typescript": "5.8.3",
"typescript-eslint": "8.34.1",
diff --git a/pkgs/cli/README.md b/pkgs/cli/README.md
index 77405b4e4..c29f9eaf1 100644
--- a/pkgs/cli/README.md
+++ b/pkgs/cli/README.md
@@ -65,7 +65,7 @@ The installer will:
Convert a TypeScript flow definition into a SQL migration:
```bash
-npx pgflow@latest compile supabase/functions/_flows/my_flow.ts
+npx pgflow@latest compile my_flow
```
Options:
diff --git a/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts b/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts
index 5f6d3fa17..99deb22f9 100644
--- a/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts
+++ b/pkgs/cli/__tests__/commands/install/create-edge-function.test.ts
@@ -41,11 +41,11 @@ describe('createEdgeFunction', () => {
expect(fs.existsSync(indexPath)).toBe(true);
expect(fs.existsSync(denoJsonPath)).toBe(true);
- // Verify index.ts content (inline flow registration, no flows.ts)
+ // Verify index.ts content (namespace import from flows directory)
const indexContent = fs.readFileSync(indexPath, 'utf8');
expect(indexContent).toContain("import { ControlPlane } from '@pgflow/edge-worker'");
- expect(indexContent).toContain('ControlPlane.serve([');
- expect(indexContent).toContain('// Import your flows here');
+ expect(indexContent).toContain("import * as flows from '../../flows/index.ts'");
+ expect(indexContent).toContain('ControlPlane.serve(flows)');
// Verify deno.json content
const denoJsonContent = fs.readFileSync(denoJsonPath, 'utf8');
diff --git a/pkgs/cli/__tests__/commands/install/create-flows-directory.test.ts b/pkgs/cli/__tests__/commands/install/create-flows-directory.test.ts
new file mode 100644
index 000000000..fa4b40540
--- /dev/null
+++ b/pkgs/cli/__tests__/commands/install/create-flows-directory.test.ts
@@ -0,0 +1,150 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+import { createFlowsDirectory } from '../../../src/commands/install/create-flows-directory';
+
+describe('createFlowsDirectory', () => {
+ let tempDir: string;
+ let supabasePath: string;
+ let flowsDir: string;
+
+ beforeEach(() => {
+ // Create a temporary directory for testing
+ tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pgflow-test-'));
+ supabasePath = path.join(tempDir, 'supabase');
+ flowsDir = path.join(supabasePath, 'flows');
+ });
+
+ afterEach(() => {
+ // Clean up the temporary directory
+ fs.rmSync(tempDir, { recursive: true, force: true });
+ });
+
+ it('should create both files when none exist', async () => {
+ const result = await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ // Should return true because files were created
+ expect(result).toBe(true);
+
+ // Verify directory was created
+ expect(fs.existsSync(flowsDir)).toBe(true);
+
+ // Verify all files exist
+ const indexPath = path.join(flowsDir, 'index.ts');
+ const exampleFlowPath = path.join(flowsDir, 'example_flow.ts');
+
+ expect(fs.existsSync(indexPath)).toBe(true);
+ expect(fs.existsSync(exampleFlowPath)).toBe(true);
+ });
+
+ it('should create index.ts with barrel export pattern', async () => {
+ await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ const indexPath = path.join(flowsDir, 'index.ts');
+ const indexContent = fs.readFileSync(indexPath, 'utf8');
+
+ // Should have export for ExampleFlow
+ expect(indexContent).toContain("export { ExampleFlow } from './example_flow.ts'");
+ // Should have documenting comment
+ expect(indexContent).toContain('Re-export all flows');
+ });
+
+ it('should create example_flow.ts with named export', async () => {
+ await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ const exampleFlowPath = path.join(flowsDir, 'example_flow.ts');
+ const exampleFlowContent = fs.readFileSync(exampleFlowPath, 'utf8');
+
+ // Should use named export (not default)
+ expect(exampleFlowContent).toContain('export const ExampleFlow');
+ // Should import Flow from @pgflow/dsl
+ expect(exampleFlowContent).toContain("import { Flow } from '@pgflow/dsl'");
+ // Should have correct slug
+ expect(exampleFlowContent).toContain("slug: 'example_flow'");
+ // Should have input type
+ expect(exampleFlowContent).toContain('type Input');
+ // Should have at least one step
+ expect(exampleFlowContent).toContain('.step(');
+ });
+
+ it('should not create files when they already exist', async () => {
+ // Pre-create the directory and files
+ fs.mkdirSync(flowsDir, { recursive: true });
+
+ const indexPath = path.join(flowsDir, 'index.ts');
+ const exampleFlowPath = path.join(flowsDir, 'example_flow.ts');
+
+ fs.writeFileSync(indexPath, '// existing content');
+ fs.writeFileSync(exampleFlowPath, '// existing content');
+
+ const result = await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ // Should return false because no changes were needed
+ expect(result).toBe(false);
+
+ // Verify files still exist with original content
+ expect(fs.readFileSync(indexPath, 'utf8')).toBe('// existing content');
+ expect(fs.readFileSync(exampleFlowPath, 'utf8')).toBe('// existing content');
+ });
+
+ it('should create only missing files when some already exist', async () => {
+ // Pre-create the directory and one file
+ fs.mkdirSync(flowsDir, { recursive: true });
+
+ const indexPath = path.join(flowsDir, 'index.ts');
+ const exampleFlowPath = path.join(flowsDir, 'example_flow.ts');
+
+ // Only create index.ts
+ fs.writeFileSync(indexPath, '// existing content');
+
+ const result = await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ // Should return true because example_flow.ts was created
+ expect(result).toBe(true);
+
+ // Verify index.ts was not modified
+ expect(fs.readFileSync(indexPath, 'utf8')).toBe('// existing content');
+
+ // Verify example_flow.ts was created
+ expect(fs.existsSync(exampleFlowPath)).toBe(true);
+
+ const exampleContent = fs.readFileSync(exampleFlowPath, 'utf8');
+ expect(exampleContent).toContain('export const ExampleFlow');
+ });
+
+ it('should create parent directories if they do not exist', async () => {
+ // Don't create anything - let the function create it all
+ expect(fs.existsSync(supabasePath)).toBe(false);
+
+ const result = await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
+ expect(result).toBe(true);
+
+ // Verify all parent directories were created
+ expect(fs.existsSync(supabasePath)).toBe(true);
+ expect(fs.existsSync(flowsDir)).toBe(true);
+
+ // Verify files exist
+ expect(fs.existsSync(path.join(flowsDir, 'index.ts'))).toBe(true);
+ expect(fs.existsSync(path.join(flowsDir, 'example_flow.ts'))).toBe(true);
+ });
+});
diff --git a/pkgs/cli/src/commands/install/create-edge-function.ts b/pkgs/cli/src/commands/install/create-edge-function.ts
index f8d3f2c86..d8137ee13 100644
--- a/pkgs/cli/src/commands/install/create-edge-function.ts
+++ b/pkgs/cli/src/commands/install/create-edge-function.ts
@@ -5,13 +5,9 @@ import chalk from 'chalk';
import { getVersion } from '../../utils/get-version.js';
const INDEX_TS_TEMPLATE = `import { ControlPlane } from '@pgflow/edge-worker';
-// Import your flows here:
-// import { MyFlow } from '../_flows/my_flow.ts';
+import * as flows from '../../flows/index.ts';
-ControlPlane.serve([
- // Add your flows here:
- // MyFlow,
-]);
+ControlPlane.serve(flows);
`;
const DENO_JSON_TEMPLATE = (version: string) => `{
diff --git a/pkgs/cli/src/commands/install/create-flows-directory.ts b/pkgs/cli/src/commands/install/create-flows-directory.ts
new file mode 100644
index 000000000..483aa26ed
--- /dev/null
+++ b/pkgs/cli/src/commands/install/create-flows-directory.ts
@@ -0,0 +1,91 @@
+import fs from 'fs';
+import path from 'path';
+import { log, confirm } from '@clack/prompts';
+import chalk from 'chalk';
+
+const INDEX_TS_TEMPLATE = `// Re-export all flows from this directory
+// Example: export { MyFlow } from './my_flow.ts';
+
+export { ExampleFlow } from './example_flow.ts';
+`;
+
+const EXAMPLE_FLOW_TEMPLATE = `import { Flow } from '@pgflow/dsl';
+
+type Input = { name: string };
+
+export const ExampleFlow = new Flow<Input>({ slug: 'example_flow' })
+ .step({ slug: 'greet' }, (input) => \`Hello, \${input.run.name}!\`);
+`;
+
+export async function createFlowsDirectory({
+ supabasePath,
+ autoConfirm = false,
+}: {
+ supabasePath: string;
+ autoConfirm?: boolean;
+}): Promise<boolean> {
+ const flowsDir = path.join(supabasePath, 'flows');
+
+ const indexPath = path.join(flowsDir, 'index.ts');
+ const exampleFlowPath = path.join(flowsDir, 'example_flow.ts');
+
+ // Relative paths for display
+ const relativeFlowsDir = 'supabase/flows';
+ const relativeIndexPath = `${relativeFlowsDir}/index.ts`;
+ const relativeExampleFlowPath = `${relativeFlowsDir}/example_flow.ts`;
+
+ // Check what needs to be created
+ const filesToCreate: Array<{ path: string; relativePath: string }> = [];
+
+ if (!fs.existsSync(indexPath)) {
+ filesToCreate.push({ path: indexPath, relativePath: relativeIndexPath });
+ }
+
+ if (!fs.existsSync(exampleFlowPath)) {
+ filesToCreate.push({ path: exampleFlowPath, relativePath: relativeExampleFlowPath });
+ }
+
+  // If all files exist, there is nothing to create — report no changes
+ if (filesToCreate.length === 0) {
+ log.success('Flows directory already up to date');
+ return false;
+ }
+
+ // Show preview and ask for confirmation only when not auto-confirming
+ if (!autoConfirm) {
+ const summaryMsg = [
+ `Create ${chalk.cyan('flows/')} ${chalk.dim('(flow definitions directory)')}:`,
+ '',
+ ...filesToCreate.map((file) => ` ${chalk.bold(path.basename(file.relativePath))}`),
+ ].join('\n');
+
+ log.info(summaryMsg);
+
+ const confirmResult = await confirm({
+ message: `Create flows/?`,
+ });
+
+ if (confirmResult !== true) {
+ log.warn('Flows directory installation skipped');
+ return false;
+ }
+ }
+
+ // Create the directory if it doesn't exist
+ if (!fs.existsSync(flowsDir)) {
+ fs.mkdirSync(flowsDir, { recursive: true });
+ }
+
+ // Create files
+ if (filesToCreate.some((f) => f.path === indexPath)) {
+ fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE);
+ }
+
+ if (filesToCreate.some((f) => f.path === exampleFlowPath)) {
+ fs.writeFileSync(exampleFlowPath, EXAMPLE_FLOW_TEMPLATE);
+ }
+
+ log.success('Flows directory created');
+
+ return true;
+}
diff --git a/pkgs/cli/src/commands/install/index.ts b/pkgs/cli/src/commands/install/index.ts
index be281ed2c..95fdbb1c2 100644
--- a/pkgs/cli/src/commands/install/index.ts
+++ b/pkgs/cli/src/commands/install/index.ts
@@ -5,6 +5,7 @@ import { copyMigrations } from './copy-migrations.js';
import { updateConfigToml } from './update-config-toml.js';
import { updateEnvFile } from './update-env-file.js';
import { createEdgeFunction } from './create-edge-function.js';
+import { createFlowsDirectory } from './create-flows-directory.js';
import { supabasePathPrompt } from './supabase-path-prompt.js';
export default (program: Command) => {
@@ -34,6 +35,7 @@ export default (program: Command) => {
'',
` • Update ${chalk.cyan('supabase/config.toml')} ${chalk.dim('(enable pooler, per_worker runtime)')}`,
` • Add pgflow migrations to ${chalk.cyan('supabase/migrations/')}`,
+ ` • Create ${chalk.cyan('supabase/flows/')} ${chalk.dim('(flow definitions with example)')}`,
` • Create Control Plane in ${chalk.cyan('supabase/functions/pgflow/')}`,
` • Configure ${chalk.cyan('supabase/functions/.env')}`,
'',
@@ -73,6 +75,11 @@ export default (program: Command) => {
autoConfirm: true,
});
+ const flowsDirectory = await createFlowsDirectory({
+ supabasePath,
+ autoConfirm: true,
+ });
+
const edgeFunction = await createEdgeFunction({
supabasePath,
autoConfirm: true,
@@ -86,7 +93,7 @@ export default (program: Command) => {
// Step 4: Show completion message
const outroMessages: string[] = [];
- if (migrations || configUpdate || edgeFunction || envFile) {
+ if (migrations || configUpdate || flowsDirectory || edgeFunction || envFile) {
outroMessages.push(chalk.green.bold('✓ Installation complete!'));
} else {
outroMessages.push(
diff --git a/pkgs/cli/supabase/flows/index.ts b/pkgs/cli/supabase/flows/index.ts
new file mode 100644
index 000000000..3c1eb5ae9
--- /dev/null
+++ b/pkgs/cli/supabase/flows/index.ts
@@ -0,0 +1,2 @@
+// Re-export all flows from this directory
+export { TestFlowE2E } from './test_flow_e2e.ts';
diff --git a/pkgs/cli/supabase/functions/_flows/test_flow_e2e.ts b/pkgs/cli/supabase/flows/test_flow_e2e.ts
similarity index 90%
rename from pkgs/cli/supabase/functions/_flows/test_flow_e2e.ts
rename to pkgs/cli/supabase/flows/test_flow_e2e.ts
index 92a719771..db7e56399 100644
--- a/pkgs/cli/supabase/functions/_flows/test_flow_e2e.ts
+++ b/pkgs/cli/supabase/flows/test_flow_e2e.ts
@@ -7,5 +7,3 @@ export const TestFlowE2E = new Flow<{ value: string }>({
}).step({ slug: 'step1' }, async (input) => ({
result: `processed: ${input.run.value}`,
}));
-
-export default TestFlowE2E;
diff --git a/pkgs/cli/supabase/functions/pgflow/index.ts b/pkgs/cli/supabase/functions/pgflow/index.ts
index d2c3d840a..b2dab37b7 100644
--- a/pkgs/cli/supabase/functions/pgflow/index.ts
+++ b/pkgs/cli/supabase/functions/pgflow/index.ts
@@ -1,4 +1,4 @@
import { ControlPlane } from '@pgflow/edge-worker';
-import { TestFlowE2E } from '../_flows/test_flow_e2e.ts';
+import * as flows from '../../flows/index.ts';
-ControlPlane.serve([TestFlowE2E]);
+ControlPlane.serve(flows);
diff --git a/pkgs/edge-worker/src/control-plane/index.ts b/pkgs/edge-worker/src/control-plane/index.ts
index 54914fc8f..f92a9c19f 100644
--- a/pkgs/edge-worker/src/control-plane/index.ts
+++ b/pkgs/edge-worker/src/control-plane/index.ts
@@ -6,8 +6,18 @@
*
* @example
* ```typescript
+ * // Using namespace import (recommended)
* import { ControlPlane } from '@pgflow/edge-worker';
- * import { MyFlow } from '../_flows/my_flow.ts';
+ * import * as flows from '../../flows/index.ts';
+ *
+ * ControlPlane.serve(flows);
+ * ```
+ *
+ * @example
+ * ```typescript
+ * // Using array (legacy)
+ * import { ControlPlane } from '@pgflow/edge-worker';
+ * import { MyFlow } from '../../flows/my_flow.ts';
*
* ControlPlane.serve([MyFlow]);
* ```
@@ -21,7 +31,7 @@ import { serveControlPlane } from './server.js';
export const ControlPlane = {
/**
* Start the ControlPlane HTTP server
- * @param flows Array of flow definitions to register
+ * @param flowsInput Array or object of flow definitions to register
*/
serve: serveControlPlane,
};
diff --git a/pkgs/edge-worker/src/control-plane/server.ts b/pkgs/edge-worker/src/control-plane/server.ts
index 2dd52ec24..3121f242b 100644
--- a/pkgs/edge-worker/src/control-plane/server.ts
+++ b/pkgs/edge-worker/src/control-plane/server.ts
@@ -17,6 +17,20 @@ export interface ErrorResponse {
message: string;
}
+/**
+ * Input type for flow registration - accepts array or object (for namespace imports)
+ */
+export type FlowInput = AnyFlow[] | Record<string, AnyFlow>;
+
+/**
+ * Normalizes flow input to array format
+ * @param flowsInput Array or object of flows
+ * @returns Array of flows
+ */
+function normalizeFlowInput(flowsInput: FlowInput): AnyFlow[] {
+ return Array.isArray(flowsInput) ? flowsInput : Object.values(flowsInput);
+}
+
/**
* Builds a flow registry and validates no duplicate slugs
* @param flows Array of flow definitions
@@ -39,10 +53,11 @@ function buildFlowRegistry(flows: AnyFlow[]): Map<string, AnyFlow> {
/**
* Creates a request handler for the ControlPlane HTTP API
- * @param flows Array of flow definitions to register
+ * @param flowsInput Array or object of flow definitions to register
* @returns Request handler function
*/
-export function createControlPlaneHandler(flows: AnyFlow[]) {
+export function createControlPlaneHandler(flowsInput: FlowInput) {
+ const flows = normalizeFlowInput(flowsInput);
const registry = buildFlowRegistry(flows);
return (req: Request): Response => {
@@ -73,10 +88,10 @@ export function createControlPlaneHandler(flows: AnyFlow[]) {
/**
* Serves the ControlPlane HTTP API for flow compilation
- * @param flows Array of flow definitions to register
+ * @param flowsInput Array or object of flow definitions to register
*/
-export function serveControlPlane(flows: AnyFlow[]): void {
- const handler = createControlPlaneHandler(flows);
+export function serveControlPlane(flowsInput: FlowInput): void {
+ const handler = createControlPlaneHandler(flowsInput);
// Create HTTP server using Deno.serve (follows Supabase Edge Function pattern)
Deno.serve({}, handler);
diff --git a/pkgs/edge-worker/tests/unit/control-plane/server.test.ts b/pkgs/edge-worker/tests/unit/control-plane/server.test.ts
index 3cf541abf..b61f1089f 100644
--- a/pkgs/edge-worker/tests/unit/control-plane/server.test.ts
+++ b/pkgs/edge-worker/tests/unit/control-plane/server.test.ts
@@ -143,3 +143,89 @@ Deno.test('ControlPlane Handler - GET /flows/:slug returns 500 on compilation er
assertEquals(data.error, 'Compilation Error');
assertMatch(data.message, /does not exist in flow/);
});
+
+// Tests for object input support (namespace imports)
+Deno.test('ControlPlane Handler - accepts object of flows', async () => {
+ const flowsObject = {
+ FlowWithSingleStep,
+ FlowWithRuntimeOptions,
+ };
+
+ const handler = createControlPlaneHandler(flowsObject);
+ const expectedSql = compileFlow(FlowWithSingleStep);
+
+ const request = new Request('http://localhost/pgflow/flows/flow_single_step');
+ const response = handler(request);
+
+ assertEquals(response.status, 200);
+ const data = await response.json();
+ assertEquals(data.flowSlug, 'flow_single_step');
+ assertEquals(data.sql, expectedSql);
+});
+
+Deno.test('ControlPlane Handler - accepts namespace import style object', () => {
+ // Simulates: import * as flows from './flows/index.ts'
+ const flowsNamespace = {
+ FlowWithSingleStep,
+ FlowWithMultipleSteps,
+ FlowWithParallelSteps,
+ };
+
+ const handler = createControlPlaneHandler(flowsNamespace);
+
+ // All flows should be accessible
+ for (const flow of [FlowWithSingleStep, FlowWithMultipleSteps, FlowWithParallelSteps]) {
+ const request = new Request(`http://localhost/pgflow/flows/${flow.slug}`);
+ const response = handler(request);
+ assertEquals(response.status, 200);
+ }
+});
+
+Deno.test('ControlPlane Handler - object input rejects duplicate flow slugs', () => {
+ // Create two different flow objects with the same slug
+ const Flow1 = new Flow({ slug: 'duplicate_slug' }).step({ slug: 's1' }, () => ({}));
+ const Flow2 = new Flow({ slug: 'duplicate_slug' }).step({ slug: 's2' }, () => ({}));
+
+ let error: Error | null = null;
+ try {
+ createControlPlaneHandler({ Flow1, Flow2 });
+ } catch (e) {
+ error = e as Error;
+ }
+
+ assertEquals(error instanceof Error, true);
+ assertMatch(error!.message, /Duplicate flow slug detected: 'duplicate_slug'/);
+});
+
+Deno.test('ControlPlane Handler - object input returns 404 for unknown flow', async () => {
+ const handler = createControlPlaneHandler({ FlowWithSingleStep });
+
+ const request = new Request('http://localhost/pgflow/flows/unknown_flow');
+ const response = handler(request);
+
+ assertEquals(response.status, 404);
+ const data = await response.json();
+ assertEquals(data.error, 'Flow Not Found');
+});
+
+Deno.test('ControlPlane Handler - empty object creates handler with no flows', async () => {
+ const handler = createControlPlaneHandler({});
+
+ const request = new Request('http://localhost/pgflow/flows/any_flow');
+ const response = handler(request);
+
+ assertEquals(response.status, 404);
+ const data = await response.json();
+ assertEquals(data.error, 'Flow Not Found');
+});
+
+Deno.test('ControlPlane Handler - empty array creates handler with no flows', async () => {
+ const handler = createControlPlaneHandler([]);
+
+ const request = new Request('http://localhost/pgflow/flows/any_flow');
+ const response = handler(request);
+
+ assertEquals(response.status, 404);
+ const data = await response.json();
+ assertEquals(data.error, 'Flow Not Found');
+});
diff --git a/pkgs/website/src/content/docs/build/organize-flow-code.mdx b/pkgs/website/src/content/docs/build/organize-flow-code.mdx
index a2f9d5eb0..b8d0c1544 100644
--- a/pkgs/website/src/content/docs/build/organize-flow-code.mdx
+++ b/pkgs/website/src/content/docs/build/organize-flow-code.mdx
@@ -10,35 +10,69 @@ import { FileTree } from '@astrojs/starlight/components';
This guide outlines best practices for organizing your pgflow codebase to improve maintainability, reusability, and clarity.
-## How to structure your code
+:::note[Flexible Structure]
+The directory structure shown here is a **recommendation**, not a requirement. pgflow works with any file organization - what matters is that your flows are imported into the Control Plane (for compilation) and into your worker Edge Functions (for execution). Feel free to adapt the structure to your project's needs.
+:::
-Following the pattern from [Create Flow](/get-started/flows/create-flow/), organize flows and tasks in underscore-prefixed folders using Supabase's pattern for shared code:
+## Recommended structure
+
+After running `pgflow install`, you'll have a `flows/` directory set up. As your project grows, you can add a `tasks/` directory for reusable task functions:
```bash frame="none"
-mkdir -p supabase/functions/_flows supabase/functions/_tasks
+mkdir -p supabase/tasks
```
- supabase
+ - flows
+ - analyze_website.ts (flow definition)
+ - index.ts (re-exports all flows)
+ - tasks
+ - scrapeWebsite.ts
+ - summarizeWithAI.ts
+ - extractTags.ts
+ - saveWebsite.ts
+ - index.ts (optional - re-exports tasks)
- functions
- - _flows
- - analyze_website.ts
- - _tasks
- - scrapeWebsite.ts
- - summarizeWithAI.ts
- - extractTags.ts
- - saveWebsite.ts
+ - pgflow
+ - index.ts (Control Plane)
+ - deno.json
- analyze_website_worker
- - index.ts
+ - index.ts (worker for this flow)
- deno.json
+ - migrations
This structure enables:
-- Sharing tasks across multiple flows
-- Running multiple workers for the same flow (horizontal scaling)
-- Clear separation of flow definitions from execution
-- Standard Supabase pattern for shared code
+- **First-class concepts**: Flows and tasks are not hidden in underscore-prefixed folders
+- **Self-documenting**: Structure clearly shows where things go
+- **Clean imports**: `../../flows/index.ts` from Control Plane, `../../tasks/` from flows
+- **Scalable**: Ready for multiple flows and workers
+- **Barrel exports**: Each directory has an `index.ts` for clean re-exports
+
+## The index.ts pattern
+
+Each directory uses an `index.ts` barrel file that re-exports all modules:
+
+```typescript title="supabase/flows/index.ts"
+// Re-export all flows from this directory
+export { AnalyzeWebsite } from './analyze_website.ts';
+export { ProcessOrder } from './process_order.ts';
+```
+
+```typescript title="supabase/tasks/index.ts (optional)"
+// Re-export all tasks from this directory
+export { scrapeWebsite } from './scrapeWebsite.ts';
+export { summarizeWithAI } from './summarizeWithAI.ts';
+```
+
+This pattern provides:
+
+- Clean imports from other files
+- Single place to see all available flows/tasks
+- Better IDE support with autocomplete
+- Easy to add new flows - just add one export line
## Learn more
diff --git a/pkgs/website/src/content/docs/build/version-flows.mdx b/pkgs/website/src/content/docs/build/version-flows.mdx
index 0679c6027..768bf5899 100644
--- a/pkgs/website/src/content/docs/build/version-flows.mdx
+++ b/pkgs/website/src/content/docs/build/version-flows.mdx
@@ -39,33 +39,39 @@ They break active runs and corrupt data.
Put the new flow in its own file with a versioned slug.
-- supabase/functions/
- - _tasks/
+- supabase/
+ - flows/
+ - greet_user.ts
+ - **greet_user_v2.ts** // new version
+ - index.ts
+ - tasks/
- fetchUserData.ts
- sendEmail.ts
- - _flows/
- - greet_user.ts
- - **greet_user_v2.ts** // 👈 new
1. **Create new flow file**
- ```typescript
- // supabase/functions/_flows/greet_user_v2.ts
- export default new Flow({
+ ```typescript title="supabase/flows/greet_user_v2.ts"
+ export const GreetUserV2 = new Flow({
slug: 'greet_user_v2',
// ...new configuration and step definitions
})
```
+ Then add it to `supabase/flows/index.ts`:
+
+ ```typescript
+ export { GreetUserV2 } from './greet_user_v2.ts';
+ ```
+
2. **Compile it**
[Compile the new flow to SQL](/get-started/flows/compile-flow/) which generates a migration file:
```bash frame="none"
- npx pgflow@latest compile supabase/functions/_flows/greet_user_v2.ts
+ npx pgflow@latest compile greet_user_v2
```
3. **Run migration**
diff --git a/pkgs/website/src/content/docs/concepts/compilation.mdx b/pkgs/website/src/content/docs/concepts/compilation.mdx
index 2b533083d..4434c2769 100644
--- a/pkgs/website/src/content/docs/concepts/compilation.mdx
+++ b/pkgs/website/src/content/docs/concepts/compilation.mdx
@@ -44,13 +44,9 @@ The `pgflow` edge function is created during installation and serves as your pro
```typescript title="supabase/functions/pgflow/index.ts"
import { ControlPlane } from '@pgflow/edge-worker';
-import GreetUser from '../_flows/greet_user.ts';
-import ProcessOrder from '../_flows/process_order.ts';
+import * as flows from '../../flows/index.ts';
-ControlPlane.serve([
- GreetUser,
- ProcessOrder,
-]);
+ControlPlane.serve(flows);
```
The ControlPlane:
@@ -75,9 +71,10 @@ This architecture provides several benefits:
To make a flow available for compilation:
-1. Create the flow definition in `_flows/`
-2. Import it in `supabase/functions/pgflow/index.ts`
-3. Add it to the `ControlPlane.serve([...])` array
+1. Create the flow definition in `supabase/flows/`
+2. Export it from `supabase/flows/index.ts`
+
+The ControlPlane automatically picks up all flows exported from your `flows/index.ts`.
diff --git a/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx b/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx
index 2215b01bd..6d2e3c158 100644
--- a/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx
+++ b/pkgs/website/src/content/docs/get-started/flows/run-flow.mdx
@@ -33,8 +33,8 @@ Before starting, make sure you have completed:
Replace contents of `index.ts` file with the following:
```typescript title="supabase/functions/greet_user_worker/index.ts"
- import { EdgeWorker } from "jsr:@pgflow/edge-worker";
- import GreetUser from '../_flows/greet_user.ts';
+ import { EdgeWorker } from "@pgflow/edge-worker";
+ import { GreetUser } from '../../flows/greet_user.ts';
// Pass the flow definition to the Edge Worker
EdgeWorker.start(GreetUser);
diff --git a/pkgs/website/src/content/docs/tutorials/ai-web-scraper/backend.mdx b/pkgs/website/src/content/docs/tutorials/ai-web-scraper/backend.mdx
index 47fd3b32c..0627832aa 100644
--- a/pkgs/website/src/content/docs/tutorials/ai-web-scraper/backend.mdx
+++ b/pkgs/website/src/content/docs/tutorials/ai-web-scraper/backend.mdx
@@ -72,7 +72,7 @@ Build four focused functions that each do one thing well:
| `extractTags.ts` | AI tags |
| `saveToDb.ts` | Saves to database |
-Put these in `supabase/functions/_tasks` (see [organizing flow code](/build/organize-flow-code/) for project structure):
+Put these in `supabase/tasks/` (see [organizing flow code](/build/organize-flow-code/) for project structure):
### Web Scraping
@@ -80,7 +80,7 @@ Put these in `supabase/functions/_tasks` (see [organizing flow code](/build/orga
scrapeWebsite.ts - Fetch and clean webpage content
```typescript
-// supabase/functions/_tasks/scrapeWebsite.ts
+// supabase/tasks/scrapeWebsite.ts
export default async function scrapeWebsite(url: string) {
console.log("[scrapeWebsite] fetching", url);
@@ -117,7 +117,7 @@ We're using OpenAI's newer Responses API (`openai.responses.parse`) rather than
summarize.ts - AI summary
```typescript
-// supabase/functions/_tasks/summarize.ts
+// supabase/tasks/summarize.ts
import OpenAI from "npm:openai";
export default async function summarize(content: string) {
@@ -162,7 +162,7 @@ export default async function summarize(content: string) {
extractTags.ts - Extract tags
```typescript
-// supabase/functions/_tasks/extractTags.ts
+// supabase/tasks/extractTags.ts
import OpenAI from "npm:openai";
export default async function extractTags(content: string) {
@@ -214,7 +214,7 @@ The final task saves all results to your database:
saveToDb.ts - Store results
```typescript
-// supabase/functions/_tasks/saveToDb.ts
+// supabase/tasks/saveToDb.ts
import { createClient } from "jsr:@supabase/supabase-js";
export default async function saveToDb(row: {
@@ -254,19 +254,19 @@ Uses service role key for direct database access. Both URL and key are auto-avai
## Step 3 - Define flow
-Connect tasks into a workflow using pgflow's [TypeScript DSL](/concepts/understanding-flows/) (`supabase/functions/_flows/analyze_website.ts`):
+Connect tasks into a workflow using pgflow's [TypeScript DSL](/concepts/understanding-flows/) (`supabase/flows/analyze_website.ts`):
```typescript
-// supabase/functions/_flows/analyze_website.ts
-import { Flow } from "npm:@pgflow/dsl";
-import scrapeWebsite from "../_tasks/scrapeWebsite.ts";
-import summarize from "../_tasks/summarize.ts";
-import extractTags from "../_tasks/extractTags.ts";
-import saveToDb from "../_tasks/saveToDb.ts";
+// supabase/flows/analyze_website.ts
+import { Flow } from "@pgflow/dsl";
+import scrapeWebsite from "../tasks/scrapeWebsite.ts";
+import summarize from "../tasks/summarize.ts";
+import extractTags from "../tasks/extractTags.ts";
+import saveToDb from "../tasks/saveToDb.ts";
type Input = { url: string };
-export default new Flow({ slug: "analyzeWebsite", maxAttempts: 3 })
+export const AnalyzeWebsite = new Flow({ slug: "analyzeWebsite", maxAttempts: 3 })
.step({ slug: "website" }, ({ run }) => scrapeWebsite(run.url))
.step({ slug: "summary", dependsOn: ["website"] }, ({ website }) =>
summarize(website.content),
@@ -302,25 +302,18 @@ Summary and tags execute simultaneously since both only need website content - c
Turn your TypeScript flow into SQL using pgflow's [compiler](/get-started/flows/compile-flow/):
-1. Compile TypeScript to SQL:
- ```bash
- npx pgflow@latest compile supabase/functions/_flows/analyze_website.ts
-   # Generates supabase/migrations/<timestamp>_analyze_website.sql
+1. First, export the flow from `supabase/flows/index.ts`:
+ ```typescript
+ export { AnalyzeWebsite } from './analyze_website.ts';
```
-
- Using import maps or custom `deno.json`?
-
- You can use `--deno-json` flag to point at your `deno.json` file:
- ```bash
- npx pgflow@latest compile \
- --deno-json=path/to/deno.json \
- supabase/functions/_flows/analyze_website.ts
- ```
- Run `npx pgflow@latest compile --help` for additional options.
-
+2. Compile TypeScript to SQL:
+ ```bash
+ npx pgflow@latest compile analyzeWebsite
+   # Generates supabase/migrations/<timestamp>_analyzeWebsite.sql
+ ```
-2. Apply migration to database:
+3. Apply migration to database:
```bash
npx supabase migrations up --local
```
@@ -345,10 +338,10 @@ Changing steps requires a new flow with unique `slug`. This is a core design dec
2. Replace the generated `index.ts` with the following code:
```typescript
// supabase/functions/analyze_website_worker/index.ts
- import { EdgeWorker } from "jsr:@pgflow/edge-worker";
- import AnalyzeWebsite from '../_flows/analyze_website.ts';
+ import { EdgeWorker } from "@pgflow/edge-worker";
+ import { AnalyzeWebsite } from '../../flows/analyze_website.ts';
- EdgeWorker.start(AnalyzeWebsite); // That's it! 🤯
+ EdgeWorker.start(AnalyzeWebsite);
```
3. Update your `supabase/config.toml`:
diff --git a/pkgs/website/src/content/docs/tutorials/ai-web-scraper/index.mdx b/pkgs/website/src/content/docs/tutorials/ai-web-scraper/index.mdx
index 103032435..fd1c7021d 100644
--- a/pkgs/website/src/content/docs/tutorials/ai-web-scraper/index.mdx
+++ b/pkgs/website/src/content/docs/tutorials/ai-web-scraper/index.mdx
@@ -52,14 +52,15 @@ Here's the file structure we'll create:
- supabase/
+ - flows/
+ - analyze_website.ts
+ - index.ts
+ - tasks/
+ - scrapeWebsite.ts
+ - summarize.ts
+ - extractTags.ts
+ - saveToDb.ts
- functions/
- - _tasks/
- - scrapeWebsite.ts
- - summarize.ts
- - extractTags.ts
- - saveToDb.ts
- - _flows/
- - analyze_website.ts
- analyze_website_worker/
- index.ts
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 1915a5436..6b3b89f63 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -94,7 +94,7 @@ importers:
specifier: ^2.6.2
version: 2.8.8
supabase:
- specifier: ^2.34.3
+ specifier: ^2.62.10
version: 2.62.10
tslib:
specifier: ^2.3.0