diff --git a/.changeset/config.json b/.changeset/config.json index cc6eee0cf..3fbd70973 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -18,6 +18,7 @@ "@workflow/example-nitro-v2", "@workflow/example-nuxt", "@workflow/example-vite", - "@workflow/example-sveltekit" + "@workflow/example-sveltekit", + "@workflow/example-astro" ] } diff --git a/.changeset/green-clouds-search.md b/.changeset/green-clouds-search.md new file mode 100644 index 000000000..d07e97470 --- /dev/null +++ b/.changeset/green-clouds-search.md @@ -0,0 +1,5 @@ +--- +"@workflow/core": patch +--- + +Better error when passing an invalid workflow value to `start()` diff --git a/.changeset/grumpy-ties-sort.md b/.changeset/grumpy-ties-sort.md new file mode 100644 index 000000000..97cc879fc --- /dev/null +++ b/.changeset/grumpy-ties-sort.md @@ -0,0 +1,7 @@ +--- +"@workflow/builders": patch +"workflow": patch +"@workflow/astro": patch +--- + +Add @workflow/astro package diff --git a/.changeset/neat-comics-care.md b/.changeset/neat-comics-care.md new file mode 100644 index 000000000..cd9262065 --- /dev/null +++ b/.changeset/neat-comics-care.md @@ -0,0 +1,6 @@ +--- +"@workflow/world": patch +"@workflow/core": patch +--- + +track queue overhead with opentelemetry diff --git a/.changeset/poor-lines-make.md b/.changeset/poor-lines-make.md new file mode 100644 index 000000000..3f605f053 --- /dev/null +++ b/.changeset/poor-lines-make.md @@ -0,0 +1,5 @@ +--- +"@workflow/swc-plugin": patch +--- + +Normalize anonymous default export workflow IDs to "default" diff --git a/.changeset/pre.json b/.changeset/pre.json index 885839955..24e804e51 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -31,10 +31,12 @@ "@workflow/example-nuxt": "0.0.0", "@workflow/example-vite": "0.0.0", "@workflow/example-sveltekit": "0.0.0", + "@workflow/example-astro": "0.0.0", "@workflow/builders": "4.0.1-beta.3", "@workflow/sveltekit": "4.0.0-beta.1", "@workflow/nuxt": "4.0.1-beta.6", - "@workflow/rollup": 
"4.0.0-beta.1" + "@workflow/rollup": "4.0.0-beta.1", + "@workflow/astro": "4.0.0-beta.1" }, "changesets": [ "add-documentation", @@ -45,6 +47,7 @@ "bigint-serialization", "bitter-ads-hear", "bitter-trees-rest", + "blue-sloths-juggle", "breezy-schools-wonder", "bright-ducks-travel", "bumpy-taxis-learn", @@ -74,17 +77,22 @@ "dirty-sloths-cut", "dry-mammals-change", "dull-adults-wonder", + "eager-forks-yawn", "eager-lands-rhyme", "easy-coats-find", "easy-donkeys-lie", "eight-clowns-own", "eight-clubs-refuse", + "eight-emus-shave", + "eighty-bushes-relate", "eleven-roses-enter", "evil-mammals-hear", "extract-path-helpers", "extract-queue-triggers", + "famous-grapes-call", "famous-jeans-itch", "fast-eels-warn", + "fifty-teeth-grin", "fine-ideas-beam", "five-planets-push", "fix-cli-flag-docs-typo", @@ -95,11 +103,15 @@ "fluffy-peaches-buy", "forty-crabs-wonder", "four-pillows-fall", + "free-bugs-design", "free-forks-relax", "funny-chicken-burn", "funny-games-sniff", + "fuzzy-boxes-bow", "good-icons-love", + "green-clouds-search", "green-ducks-wave", + "grumpy-ties-sort", "handle-tool-errors", "happy-bees-greet", "heavy-baboons-relax", @@ -109,8 +121,10 @@ "hungry-suits-brake", "improve-error-handling", "itchy-memes-attend", + "khaki-hairs-jump", "khaki-turtles-build", "kind-vans-shout", + "large-ads-report", "large-crabs-juggle", "late-otters-melt", "lazy-wombats-dig", @@ -120,6 +134,7 @@ "light-rice-rush", "little-experts-show", "little-suits-sip", + "long-guests-enter", "lucky-breads-fry", "lucky-dolphins-flow", "lucky-signs-shine", @@ -127,10 +142,12 @@ "mean-mangos-hope", "metal-cycles-slide", "metal-sheep-wait", + "modern-bikes-shout", "moody-taxes-wonder", "move-next-builder", "nasty-donuts-grow", "nasty-olives-jam", + "neat-comics-care", "nine-beds-agree", "nine-books-heal", "nine-insects-refuse", @@ -140,10 +157,13 @@ "open-weeks-sniff", "pink-aliens-itch", "poor-eyes-build", + "poor-lines-make", "postgres-error-stack", "pretty-dolls-judge", 
"purple-pianos-stare", + "purple-symbols-remain", "quick-lizards-drum", + "quick-teeth-roll", "quiet-boxes-carry", "real-moose-kick", "real-oranges-lick", @@ -158,6 +178,7 @@ "silly-pens-shine", "silver-boats-fold", "sixty-baboons-wonder", + "sixty-carpets-join", "sixty-facts-slide", "slick-rabbits-travel", "slimy-hairs-thank", @@ -184,14 +205,17 @@ "thick-steaks-beam", "thin-mangos-double", "thin-squids-roll", + "three-peas-hide", "tidy-states-see", "tiny-coins-sip", + "tough-carrots-stop", "tough-comics-grab", "tough-toys-shop", "tough-wasps-notice", "tricky-wasps-ask", "twelve-pandas-study", "twenty-clouds-smoke", + "twenty-forks-grab", "twenty-gifts-win", "two-cooks-unite", "two-rabbits-burn", @@ -200,6 +224,7 @@ "violet-taxis-give", "warm-files-attack", "wet-birds-grab", + "wet-roses-happen", "whole-steaks-drum", "whole-toes-add", "whole-turkeys-beam", diff --git a/.changeset/quick-teeth-roll.md b/.changeset/quick-teeth-roll.md new file mode 100644 index 000000000..a845151cc --- /dev/null +++ b/.changeset/quick-teeth-roll.md @@ -0,0 +1,2 @@ +--- +--- diff --git a/.changeset/remove-vercel-world-remote-access.md b/.changeset/remove-vercel-world-remote-access.md new file mode 100644 index 000000000..fe4a15e78 --- /dev/null +++ b/.changeset/remove-vercel-world-remote-access.md @@ -0,0 +1,4 @@ +--- +--- + +Simplify "Remote Access Configuration" section in Vercel World documentation diff --git a/.changeset/twenty-forks-grab.md b/.changeset/twenty-forks-grab.md new file mode 100644 index 000000000..34bf90519 --- /dev/null +++ b/.changeset/twenty-forks-grab.md @@ -0,0 +1,11 @@ +--- +"@workflow/world-postgres": patch +"@workflow/world-testing": patch +"@workflow/world-local": patch +"@workflow/core": patch +"@workflow/next": patch +"@workflow/cli": patch +"@workflow/web": patch +--- + +Rename leftover references to "embedded world" to be "local world" diff --git a/.changeset/wet-roses-happen.md b/.changeset/wet-roses-happen.md new file mode 100644 index 
000000000..a6a61b03a --- /dev/null +++ b/.changeset/wet-roses-happen.md @@ -0,0 +1,5 @@ +--- +"@workflow/world-vercel": patch +--- + +Update `@vercel/queue` to use new QueueClient class to simplify custom header/path overwrites diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5043420d1..7ff7d9fdb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -77,6 +77,8 @@ jobs: project-id: "prj_cCZjpBy92VRbKHHbarDMhOHtkuIr" - name: "fastify" project-id: "prj_5Yap0VDQ633v998iqQ3L3aQ25Cck" + - name: "astro" + project-id: "prj_YDAXj3K8LM0hgejuIMhioz2yLgTI" env: TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} TURBO_TEAM: ${{ vars.TURBO_TEAM }} @@ -193,7 +195,7 @@ jobs: run: cd workbench/${{ matrix.app.name }} && pnpm dev & echo "starting tests in 10 seconds" && sleep 10 && pnpm vitest run packages/core/e2e/dev.test.ts && sleep 10 && pnpm run test:e2e env: APP_NAME: ${{ matrix.app.name }} - DEPLOYMENT_URL: "http://localhost:${{ matrix.app.name == 'sveltekit' && '5173' || '3000' }}" + DEPLOYMENT_URL: "http://localhost:${{ matrix.app.name == 'sveltekit' && '5173' || (matrix.app.name == 'astro' && '4321' || '3000') }}" DEV_TEST_CONFIG: ${{ toJSON(matrix.app) }} e2e-local-prod: @@ -245,7 +247,7 @@ jobs: run: cd workbench/${{ matrix.app.name }} && pnpm start & echo "starting tests in 10 seconds" && sleep 10 && pnpm run test:e2e env: APP_NAME: ${{ matrix.app.name }} - DEPLOYMENT_URL: "http://localhost:${{ matrix.app.name == 'sveltekit' && '4173' || '3000' }}" + DEPLOYMENT_URL: "http://localhost:${{ matrix.app.name == 'sveltekit' && '4173' || (matrix.app.name == 'astro' && '4321' || '3000') }}" e2e-local-postgres: name: E2E Local Postgres Tests (${{ matrix.app.name }} - ${{ matrix.app.canary && 'canary' || 'stable' }}) @@ -316,7 +318,7 @@ jobs: run: cd workbench/${{ matrix.app.name }} && pnpm start & echo "starting tests in 10 seconds" && sleep 10 && pnpm run test:e2e env: APP_NAME: ${{ matrix.app.name }} - DEPLOYMENT_URL: 
"http://localhost:${{ matrix.app.name == 'sveltekit' && '4173' || '3000' }}" + DEPLOYMENT_URL: "http://localhost:${{ matrix.app.name == 'sveltekit' && '4173' || (matrix.app.name == 'astro' && '4321' || '3000') }}" e2e-windows: name: E2E Windows Tests diff --git a/docs/app/(home)/components/frameworks.tsx b/docs/app/(home)/components/frameworks.tsx index f24283b6a..8cc35ebe8 100644 --- a/docs/app/(home)/components/frameworks.tsx +++ b/docs/app/(home)/components/frameworks.tsx @@ -45,7 +45,8 @@ export const AstroDark = (props: ComponentProps<'svg'>) => ( xmlns="http://www.w3.org/2000/svg" {...props} > - + Astro + ) => ( y2="84.9468" gradientUnits="userSpaceOnUse" > - - + + @@ -85,7 +86,8 @@ export const AstroLight = (props: ComponentProps<'svg'>) => ( xmlns="http://www.w3.org/2000/svg" {...props} > - + Astro + ) => ( xmlns="http://www.w3.org/2000/svg" {...props} > - + Astro + ) => ( xmlns="http://www.w3.org/2000/svg" {...props} > - + TanStack + ) => ( > ); @@ -710,52 +714,93 @@ export const Frameworks = () => { with the frameworks you already use with more coming soon. -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
- +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + Coming soon + +
+
handleRequest('NestJS')} + > + + +
+
handleRequest('TanStack')} > - Coming soon - - - Click on a framework to request support for it - - - Click a framework to request support - + + +
{ -
    {children}
+
    + {children} +
); diff --git a/docs/content/docs/deploying/world/local-world.mdx b/docs/content/docs/deploying/world/local-world.mdx index a2388c242..ddd7a57cb 100644 --- a/docs/content/docs/deploying/world/local-world.mdx +++ b/docs/content/docs/deploying/world/local-world.mdx @@ -62,28 +62,28 @@ By default, workflow data is stored in `.workflow-data/` in your project root. T **Environment variable:** ```bash -export WORKFLOW_EMBEDDED_DATA_DIR=./custom-workflow-data +export WORKFLOW_LOCAL_DATA_DIR=./custom-workflow-data ``` **Programmatically:** ```typescript -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; -const world = createEmbeddedWorld({ dataDir: './custom-workflow-data' }); +const world = createLocalWorld({ dataDir: './custom-workflow-data' }); ``` ### Port -By default, the embedded world **automatically detects** which port your application is listening on using process introspection. This works seamlessly with frameworks like SvelteKit, Vite, and others that use non-standard ports. +By default, the local world **automatically detects** which port your application is listening on using process introspection. This works seamlessly with frameworks like SvelteKit, Vite, and others that use non-standard ports. **Auto-detection example** (recommended): ```typescript -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; // Port is automatically detected - no configuration needed! -const world = createEmbeddedWorld(); +const world = createLocalWorld(); ``` If auto-detection fails, the world will fall back to the `PORT` environment variable, then to port `3000`. 
@@ -93,9 +93,9 @@ If auto-detection fails, the world will fall back to the `PORT` environment vari You can override the auto-detected port by specifying it explicitly: ```typescript -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; -const world = createEmbeddedWorld({ port: 3000 }); +const world = createLocalWorld({ port: 3000 }); ``` ### Base URL @@ -103,6 +103,7 @@ const world = createEmbeddedWorld({ port: 3000 }); For advanced use cases like HTTPS or custom hostnames, you can override the entire base URL. When set, this takes precedence over all port detection and configuration. **Use cases:** + - HTTPS dev servers (e.g., `next dev --experimental-https`) - Custom hostnames (e.g., `local.example.com`) - Non-localhost development @@ -110,21 +111,21 @@ For advanced use cases like HTTPS or custom hostnames, you can override the enti **Environment variable:** ```bash -export WORKFLOW_EMBEDDED_BASE_URL=https://local.example.com:3000 +export WORKFLOW_LOCAL_BASE_URL=https://local.example.com:3000 ``` **Programmatically:** ```typescript -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; // HTTPS -const world = createEmbeddedWorld({ +const world = createLocalWorld({ baseUrl: 'https://localhost:3000' }); // Custom hostname -const world = createEmbeddedWorld({ +const world = createLocalWorld({ baseUrl: 'https://local.example.com:3000' }); ``` @@ -147,7 +148,7 @@ npm run dev You can explicitly set the local world through environment variables: ```bash -export WORKFLOW_TARGET_WORLD=embedded +export WORKFLOW_TARGET_WORLD=local npm run dev ``` @@ -202,12 +203,12 @@ For production deployments, use the [Vercel World](/docs/deploying/world/vercel- ## API Reference -### `createEmbeddedWorld` +### `createLocalWorld` Creates a local world instance: ```typescript -function createEmbeddedWorld( +function createLocalWorld( args?: Partial<{ dataDir: 
string; port: number; @@ -219,9 +220,9 @@ function createEmbeddedWorld( **Parameters:** - `args` - Optional configuration object: - - `dataDir` - Directory for storing workflow data (default: `.workflow-data/` or `WORKFLOW_EMBEDDED_DATA_DIR` env var) + - `dataDir` - Directory for storing workflow data (default: `.workflow-data/` or `WORKFLOW_LOCAL_DATA_DIR` env var) - `port` - Port override for queue transport (default: auto-detected → `PORT` env var → `3000`) - - `baseUrl` - Full base URL override for queue transport (default: `http://localhost:{port}` or `WORKFLOW_EMBEDDED_BASE_URL` env var) + - `baseUrl` - Full base URL override for queue transport (default: `http://localhost:{port}` or `WORKFLOW_LOCAL_BASE_URL` env var) **Returns:** @@ -230,24 +231,24 @@ function createEmbeddedWorld( **Examples:** ```typescript -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; // Use all defaults (recommended - auto-detects port) -const world = createEmbeddedWorld(); +const world = createLocalWorld(); // Custom data directory -const world = createEmbeddedWorld({ dataDir: './my-data' }); +const world = createLocalWorld({ dataDir: './my-data' }); // Override port -const world = createEmbeddedWorld({ port: 3000 }); +const world = createLocalWorld({ port: 3000 }); // HTTPS with custom hostname -const world = createEmbeddedWorld({ +const world = createLocalWorld({ baseUrl: 'https://local.example.com:3000' }); // Multiple options -const world = createEmbeddedWorld({ +const world = createLocalWorld({ dataDir: './my-data', baseUrl: 'https://localhost:3000' }); @@ -258,4 +259,3 @@ const world = createEmbeddedWorld({ - [World Interface](/docs/deploying/world) - Understanding the World interface - [Vercel World](/docs/deploying/world/vercel-world) - For production deployments - [Observability](/docs/observability) - Monitoring and debugging tools - diff --git a/docs/content/docs/deploying/world/vercel-world.mdx 
b/docs/content/docs/deploying/world/vercel-world.mdx index 285bf4d9b..bd1e7215c 100644 --- a/docs/content/docs/deploying/world/vercel-world.mdx +++ b/docs/content/docs/deploying/world/vercel-world.mdx @@ -84,46 +84,21 @@ Each environment has isolated workflow data, ensuring that development and testi ## Remote Access Configuration -To inspect production workflows from your local machine using [observability tools](/docs/observability), you need to configure remote access. +To inspect production workflows from your local machine using [observability tools](/docs/observability), you can use CLI flags to configure remote access. -### Getting Authentication Tokens - -1. Go to [Vercel Dashboard → Settings → Tokens](https://vercel.com/account/tokens) -2. Create a new token with appropriate scopes -3. Save the token securely - -### Environment Variables - -Configure remote access via environment variables: - -```bash -# Set the target world -export WORKFLOW_TARGET_WORLD=vercel - -# Authentication token -export WORKFLOW_VERCEL_AUTH_TOKEN= - -# Environment (production, preview, development) -export WORKFLOW_VERCEL_ENV=production - -# Project ID -export WORKFLOW_VERCEL_PROJECT= - -# Team ID (if using Vercel teams) -export WORKFLOW_VERCEL_TEAM= -``` +In most cases, these flags are not necessary. The CLI automatically retrieves authentication tokens from the Vercel CLI's token storage and infers team and project IDs based on the current working directory and Vercel project linking status. 
### CLI Flags -You can also pass configuration via CLI flags when using observability tools: +If you need to override the automatic configuration, you can pass configuration via CLI flags: ```bash npx workflow inspect runs \ - --backend=vercel \ - --env=production \ - --project=my-project \ - --team=my-team \ - --authToken= + --backend vercel \ + --env production \ + --project my-project \ + --team my-team \ + --authToken ``` Learn more about remote inspection in the [Observability](/docs/observability) section. diff --git a/docs/content/docs/getting-started/astro.mdx b/docs/content/docs/getting-started/astro.mdx new file mode 100644 index 000000000..459d2b84a --- /dev/null +++ b/docs/content/docs/getting-started/astro.mdx @@ -0,0 +1,233 @@ +--- +title: Astro +--- + +This guide will walk through setting up your first workflow in an Astro app. Along the way, you'll learn more about the concepts that are fundamental to using the development kit in your own projects. + +--- + + + + +## Create Your Astro Project + +Start by creating a new Astro project. This command will create a new directory named `my-workflow-app` and setup a minimal Astro project inside it. + +```bash +npm create astro@latest my-workflow-app -- --template minimal --install --yes +``` + +Enter the newly made directory: + +```bash +cd my-workflow-app +``` + +### Install `workflow` + +```package-install +npm i workflow +``` + +### Configure Astro + +Add `workflow()` to your Astro config. This enables usage of the `"use workflow"` and `"use step"` directives. 
+ +```typescript title="astro.config.mjs" lineNumbers +// @ts-check +import { defineConfig } from "astro/config"; +import { workflow } from "workflow/astro"; + +// https://astro.build/config +export default defineConfig({ + integrations: [workflow()], +}); +``` + + + + + ### Setup IntelliSense for TypeScript (Optional) + + + +To enable helpful hints in your IDE, setup the workflow plugin in `tsconfig.json`: + +```json title="tsconfig.json" lineNumbers +{ + "compilerOptions": { + // ... rest of your TypeScript config + "plugins": [ + { + "name": "workflow" // [!code highlight] + } + ] + } +} +``` + + + + + + + + + +## Create Your First Workflow + +Create a new file for our first workflow: + +```typescript title="src/workflows/user-signup.ts" lineNumbers +import { sleep } from "workflow"; + +export async function handleUserSignup(email: string) { + "use workflow"; // [!code highlight] + + const user = await createUser(email); + await sendWelcomeEmail(user); + + await sleep("5s"); // Pause for 5s - doesn't consume any resources + await sendOnboardingEmail(user); + + return { userId: user.id, status: "onboarded" }; +} + +``` + +We'll fill in those functions next, but let's take a look at this code: + +* We define a **workflow** function with the directive `"use workflow"`. Think of the workflow function as the _orchestrator_ of individual **steps**. +* The Workflow DevKit's `sleep` function allows us to suspend execution of the workflow without using up any resources. A sleep can be a few seconds, hours, days, or even months long. + +## Create Your Workflow Steps + +Let's now define those missing functions. + +```typescript title="src/workflows/user-signup.ts" lineNumbers +import { FatalError } from "workflow" + +// Our workflow function defined earlier + +async function createUser(email: string) { + "use step"; // [!code highlight] + + console.log(`Creating user with email: ${email}`); + + // Full Node.js access - database calls, APIs, etc. 
+ return { id: crypto.randomUUID(), email }; +} + +async function sendWelcomeEmail(user: { id: string; email: string; }) { + "use step"; // [!code highlight] + + console.log(`Sending welcome email to user: ${user.id}`); + + if (Math.random() < 0.3) { + // By default, steps will be retried for unhandled errors + throw new Error("Retryable!"); + } +} + +async function sendOnboardingEmail(user: { id: string; email: string}) { + "use step"; // [!code highlight] + + if (!user.email.includes("@")) { + // To skip retrying, throw a FatalError instead + throw new FatalError("Invalid Email"); + } + + console.log(`Sending onboarding email to user: ${user.id}`); +} +``` + +Taking a look at this code: + +* Business logic lives inside **steps**. When a step is invoked inside a **workflow**, it gets enqueued to run on a separate request while the workflow is suspended, just like `sleep`. +* If a step throws an error, like in `sendWelcomeEmail`, the step will automatically be retried until it succeeds (or hits the step's max retry count). +* Steps can throw a `FatalError` if an error is intentional and should not be retried. + + +We'll dive deeper into workflows, steps, and other ways to suspend or handle events in [Foundations](/docs/foundations). 
+ + + + + +## Create Your Route Handler + +To invoke your new workflow, we'll have to add your workflow to a `POST` API route handler, `src/pages/api/signup.ts` with the following code: + +```typescript title="src/pages/api/signup.ts" +import type { APIRoute } from "astro"; +import { start } from "workflow/api"; +import { handleUserSignup } from "../../workflows/user-signup"; + +export const POST: APIRoute = async ({ request }: { request: Request }) => { + const { email } = await request.json(); + + // Executes asynchronously and doesn't block your app + await start(handleUserSignup, [email]); + return Response.json({ + message: "User signup workflow started", + }); +}; + +export const prerender = false; // Don't prerender this page since it's an API route +``` + +This route handler creates a `POST` request endpoint at `/api/signup` that will trigger your workflow. + + +Workflows can be triggered from API routes or any server-side code. + + + + + + +## Run in Development + +To start your development server, run the following command in your terminal in the Astro root directory: + +```bash +npm run dev +``` + +Once your development server is running, you can trigger your workflow by running this command in the terminal: + +```bash +curl -X POST --json '{"email":"hello@example.com"}' http://localhost:4321/api/signup +``` + +Check the Astro development server logs to see your workflow execute as well as the steps that are being processed. + +Additionally, you can use the [Workflow DevKit CLI or Web UI](/docs/observability) to inspect your workflow runs and steps in detail. + +```bash +npx workflow inspect runs +# or add '--web' for an interactive Web based UI +``` + +Workflow DevKit Web UI + +--- + +## Deploying to Production + +Workflow DevKit apps currently work best when deployed to [Vercel](https://vercel.com/home) and need no special configuration. 
+ +To deploy your Astro project to Vercel, ensure that the [Astro Vercel adapter](https://docs.astro.build/en/guides/integrations-guide/vercel) is configured: + +```bash +npx astro add vercel +``` + +Additionally, check the [Deploying](/docs/deploying) section to learn how your workflows can be deployed elsewhere. + +## Next Steps + +* Learn more about the [Foundations](/docs/foundations). +* Check [Errors](/docs/errors) if you encounter issues. +* Explore the [API Reference](/docs/api-reference). diff --git a/docs/content/docs/getting-started/express.mdx b/docs/content/docs/getting-started/express.mdx index ce6176e6c..14d54d3a6 100644 --- a/docs/content/docs/getting-started/express.mdx +++ b/docs/content/docs/getting-started/express.mdx @@ -29,7 +29,7 @@ Initialize the project: npm init --y ``` -### Install `workflow`, `express` and `nitro` +### Install `workflow`, `express`, `nitro`, and `rollup` ```package-install npm i workflow express nitro rollup @@ -50,13 +50,13 @@ npm i -D @types/express Create a new file `nitro.config.ts` for your Nitro configuration with module `workflow/nitro`. This enables usage of the `"use workflow"` and `"use step"` directives. 
```typescript title="nitro.config.ts" lineNumbers -import { defineNitroConfig } from 'nitro/config'; +import { defineNitroConfig } from "nitro/config"; export default defineNitroConfig({ - modules: ['workflow/nitro'], - vercel: { entryFormat: 'node' }, + modules: ["workflow/nitro"], + vercel: { entryFormat: "node" }, routes: { - '/**': { handler: './src/index.ts', format: "node" }, + '/**': { handler: './src/index.ts', format: 'node' }, }, }); ``` @@ -193,7 +193,6 @@ To invoke your new workflow, we'll create both the Express app and a new API rou ```typescript title="src/index.ts" import express from "express"; -import { fromNodeHandler, type NodeMiddleware } from "nitro/h3"; import { start } from "workflow/api"; import { handleUserSignup } from "../workflows/user-signup.js"; @@ -206,7 +205,7 @@ app.post("/api/signup", async (req, res) => { return res.json({ message: "User signup workflow started" }); }); -export default fromNodeHandler(app as NodeMiddleware); +export default app; ``` This route handler creates a `POST` request endpoint at `/api/signup` that will trigger your workflow. diff --git a/docs/content/docs/getting-started/fastify.mdx b/docs/content/docs/getting-started/fastify.mdx index 53a7f5c6e..6aec1989e 100644 --- a/docs/content/docs/getting-started/fastify.mdx +++ b/docs/content/docs/getting-started/fastify.mdx @@ -194,7 +194,6 @@ app.post("/api/signup", async (req, reply) => { // Wait for Fastify to be ready before handling requests await app.ready(); - export default (req: any, res: any) => { app.server.emit("request", req, res); }; diff --git a/docs/content/docs/getting-started/index.mdx b/docs/content/docs/getting-started/index.mdx index 327aa25ce..0608a9c07 100644 --- a/docs/content/docs/getting-started/index.mdx +++ b/docs/content/docs/getting-started/index.mdx @@ -3,23 +3,27 @@ title: Getting Started description: Start by choosing your framework. 
Each guide will walk you through the steps to install the dependencies and start running your first workflow. --- -import { Next, Nitro, SvelteKit, Nuxt, Hono, Bun, AstroDark, AstroLight, TanStack, Vite, Express, Fastify } from '@/app/(home)/components/frameworks'; +import { Next, Nitro, SvelteKit, Nuxt, Hono, Bun, AstroDark, AstroLight, TanStack, Vite, Express, Nest, Fastify } from '@/app/(home)/components/frameworks'; -
- - Next.js -
+
Next.js
-
+
Vite
+ +
+ + + Astro +
+
-
+
Express
@@ -31,34 +35,33 @@ import { Next, Nitro, SvelteKit, Nuxt, Hono, Bun, AstroDark, AstroLight, TanStac
-
+
Hono
-
+
Nitro
-
+
Nuxt
-
+
SvelteKit
- - - Astro + + NestJS Coming soon
diff --git a/packages/ai/CHANGELOG.md b/packages/ai/CHANGELOG.md index fe14e458f..787f03c8e 100644 --- a/packages/ai/CHANGELOG.md +++ b/packages/ai/CHANGELOG.md @@ -1,5 +1,19 @@ # @workflow/ai +## 4.0.1-beta.24 + +### Patch Changes + +- workflow@4.0.1-beta.24 + +## 4.0.1-beta.23 + +### Patch Changes + +- 172e015: Add AI provider step wrapper functions +- Updated dependencies [1ac5592] + - workflow@4.0.1-beta.23 + ## 4.0.1-beta.22 ### Patch Changes diff --git a/packages/ai/package.json b/packages/ai/package.json index 5de8850f1..27bd58f24 100644 --- a/packages/ai/package.json +++ b/packages/ai/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/ai", - "version": "4.0.1-beta.22", + "version": "4.0.1-beta.24", "description": "Workflow DevKit compatible helper library for the AI SDK", "type": "module", "main": "dist/index.js", @@ -63,7 +63,7 @@ }, "peerDependencies": { "ai": "^5", - "workflow": "4.0.1-beta.22" + "workflow": "4.0.1-beta.24" }, "dependencies": { "@ai-sdk/anthropic": "2.0.49", diff --git a/packages/astro/CHANGELOG.md b/packages/astro/CHANGELOG.md new file mode 100644 index 000000000..dee426a77 --- /dev/null +++ b/packages/astro/CHANGELOG.md @@ -0,0 +1,24 @@ +# @workflow/astro + +## 4.0.0-beta.3 + +### Patch Changes + +- Updated dependencies [555d7a6] + - @workflow/swc-plugin@4.0.1-beta.10 + - @workflow/builders@4.0.1-beta.20 + - @workflow/rollup@4.0.0-beta.4 + +## 4.0.0-beta.2 + +### Patch Changes + +- 1ac5592: Add @workflow/astro package +- Updated dependencies [d53bf90] +- Updated dependencies [3c19e90] +- Updated dependencies [1ac5592] +- Updated dependencies [5b91861] +- Updated dependencies [0cacb99] + - @workflow/builders@4.0.1-beta.19 + - @workflow/swc-plugin@4.0.1-beta.9 + - @workflow/rollup@4.0.0-beta.3 diff --git a/packages/astro/LICENSE.md b/packages/astro/LICENSE.md new file mode 100644 index 000000000..c4d680f55 --- /dev/null +++ b/packages/astro/LICENSE.md @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + + + TERMS AND 
CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2025 Vercel Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/astro/README.md b/packages/astro/README.md new file mode 100644 index 000000000..17a1b2eab --- /dev/null +++ b/packages/astro/README.md @@ -0,0 +1,3 @@ +# workflow/astro + +The docs have moved! 
Refer to them [here](https://useworkflow.dev/) diff --git a/packages/astro/package.json b/packages/astro/package.json new file mode 100644 index 000000000..f27733160 --- /dev/null +++ b/packages/astro/package.json @@ -0,0 +1,40 @@ +{ + "name": "@workflow/astro", + "version": "4.0.0-beta.3", + "description": "Astro integration for Workflow DevKit", + "type": "module", + "main": "dist/index.js", + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/workflow.git", + "directory": "packages/astro" + }, + "exports": { + ".": "./dist/index.js" + }, + "scripts": { + "build": "tsc", + "dev": "tsc --watch", + "clean": "tsc --build --clean && rm -rf dist" + }, + "dependencies": { + "@swc/core": "1.11.24", + "@workflow/builders": "workspace:*", + "@workflow/swc-plugin": "workspace:*", + "@workflow/rollup": "workspace:*", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + }, + "devDependencies": { + "@types/node": "catalog:", + "@workflow/tsconfig": "workspace:*", + "astro": "5.16.0" + } +} diff --git a/packages/astro/src/builder.ts b/packages/astro/src/builder.ts new file mode 100644 index 000000000..94f9852df --- /dev/null +++ b/packages/astro/src/builder.ts @@ -0,0 +1,260 @@ +import { mkdir, readFile, writeFile } from 'node:fs/promises'; +import { join, resolve } from 'node:path'; +import { + BaseBuilder, + VercelBuildOutputAPIBuilder, + createBaseBuilderConfig, + type AstroConfig, +} from '@workflow/builders'; + +// NOTE: This is the same as SvelteKit request converter, should merge +const NORMALIZE_REQUEST_CONVERTER = ` +async function normalizeRequestConverter(request) { + const options = { + method: request.method, + headers: new Headers(request.headers) + }; + if (!['GET', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'].includes(request.method)) { + options.body = await request.arrayBuffer(); + } + return new Request(request.url, options); +} +`; + +const 
WORKFLOW_ROUTES = [ + { + src: '^/\\.well-known/workflow/v1/flow/?$', + dest: '/.well-known/workflow/v1/flow', + }, + { + src: '^/\\.well-known/workflow/v1/step/?$', + dest: '/.well-known/workflow/v1/step', + }, + { + src: '^/\\.well-known/workflow/v1/webhook/([^/]+?)/?$', + dest: '/.well-known/workflow/v1/webhook/[token]', + }, +]; + +export class LocalBuilder extends BaseBuilder { + constructor() { + super({ + dirs: ['src/pages', 'src/workflows'], + buildTarget: 'astro' as const, + stepsBundlePath: '', // unused in base + workflowsBundlePath: '', // unused in base + webhookBundlePath: '', // unused in base + workingDir: process.cwd(), + debugFilePrefix: '_', // Prefix with underscore so Astro ignores debug files + }); + } + + override async build(): Promise { + const pagesDir = resolve(this.config.workingDir, 'src/pages'); + const workflowGeneratedDir = join(pagesDir, '.well-known/workflow/v1'); + + // Ensure output directories exist + await mkdir(workflowGeneratedDir, { recursive: true }); + + // Add .gitignore to exclude generated files from version control + if (process.env.VERCEL_DEPLOYMENT_ID === undefined) { + await writeFile(join(workflowGeneratedDir, '.gitignore'), '*'); + } + + // Get workflow and step files to bundle + const inputFiles = await this.getInputFiles(); + const tsConfig = await this.getTsConfigOptions(); + + const options = { + inputFiles, + workflowGeneratedDir, + tsBaseUrl: tsConfig.baseUrl, + tsPaths: tsConfig.paths, + }; + + // Generate the three Astro route handlers + await this.buildStepsRoute(options); + await this.buildWorkflowsRoute(options); + await this.buildWebhookRoute({ workflowGeneratedDir }); + } + + private async buildStepsRoute({ + inputFiles, + workflowGeneratedDir, + tsPaths, + tsBaseUrl, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsBaseUrl?: string; + tsPaths?: Record; + }) { + // Create steps route: .well-known/workflow/v1/step.js + const stepsRouteFile = join(workflowGeneratedDir, 'step.js'); + 
await this.createStepsBundle({ + format: 'esm', + inputFiles, + outfile: stepsRouteFile, + externalizeNonSteps: true, + tsBaseUrl, + tsPaths, + }); + + let stepsRouteContent = await readFile(stepsRouteFile, 'utf-8'); + + // Normalize request, needed for preserving request through astro + stepsRouteContent = stepsRouteContent.replace( + /export\s*\{\s*stepEntrypoint\s+as\s+POST\s*\}\s*;?$/m, + `${NORMALIZE_REQUEST_CONVERTER} +export const POST = async ({request}) => { + const normalRequest = await normalizeRequestConverter(request); + return stepEntrypoint(normalRequest); +} + +export const prerender = false;` + ); + await writeFile(stepsRouteFile, stepsRouteContent); + } + + private async buildWorkflowsRoute({ + inputFiles, + workflowGeneratedDir, + tsPaths, + tsBaseUrl, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsBaseUrl?: string; + tsPaths?: Record; + }) { + // Create workflows route: .well-known/workflow/v1/flow.js + const workflowsRouteFile = join(workflowGeneratedDir, 'flow.js'); + await this.createWorkflowsBundle({ + format: 'esm', + outfile: workflowsRouteFile, + bundleFinalOutput: false, + inputFiles, + tsBaseUrl, + tsPaths, + }); + + let workflowsRouteContent = await readFile(workflowsRouteFile, 'utf-8'); + + // Normalize request, needed for preserving request through astro + workflowsRouteContent = workflowsRouteContent.replace( + /export const POST = workflowEntrypoint\(workflowCode\);?$/m, + `${NORMALIZE_REQUEST_CONVERTER} +export const POST = async ({request}) => { + const normalRequest = await normalizeRequestConverter(request); + return workflowEntrypoint(workflowCode)(normalRequest); +} + +export const prerender = false;` + ); + await writeFile(workflowsRouteFile, workflowsRouteContent); + } + + private async buildWebhookRoute({ + workflowGeneratedDir, + }: { + workflowGeneratedDir: string; + }) { + // Create webhook route: .well-known/workflow/v1/webhook/[token].js + const webhookRouteFile = join(workflowGeneratedDir, 
'webhook/[token].js'); + + await this.createWebhookBundle({ + outfile: webhookRouteFile, + bundle: false, + }); + + // Post-process the generated file to wrap with Astro request converter + let webhookRouteContent = await readFile(webhookRouteFile, 'utf-8'); + + // Update handler signature to accept token as parameter + webhookRouteContent = webhookRouteContent.replace( + /async function handler\(request\) \{[\s\S]*?const token = decodeURIComponent\(pathParts\[pathParts\.length - 1\]\);/, + `async function handler(request, token) {` + ); + + // Remove the URL parsing code since we get token from params + webhookRouteContent = webhookRouteContent.replace( + /const url = new URL\(request\.url\);[\s\S]*?const pathParts = url\.pathname\.split\('\/'\);[\s\S]*?\n/, + '' + ); + + // Normalize request, needed for preserving request through astro + webhookRouteContent = webhookRouteContent.replace( + /export const GET = handler;\nexport const POST = handler;\nexport const PUT = handler;\nexport const PATCH = handler;\nexport const DELETE = handler;\nexport const HEAD = handler;\nexport const OPTIONS = handler;/, + `${NORMALIZE_REQUEST_CONVERTER} +const createHandler = (method) => async ({ request, params, platform }) => { + const normalRequest = await normalizeRequestConverter(request); + const response = await handler(normalRequest, params.token); + return response; +}; + +export const GET = createHandler('GET'); +export const POST = createHandler('POST'); +export const PUT = createHandler('PUT'); +export const PATCH = createHandler('PATCH'); +export const DELETE = createHandler('DELETE'); +export const HEAD = createHandler('HEAD'); +export const OPTIONS = createHandler('OPTIONS'); + +export const prerender = false;` + ); + + await writeFile(webhookRouteFile, webhookRouteContent); + } +} + +export class VercelBuilder extends VercelBuildOutputAPIBuilder { + constructor(config?: Partial) { + const workingDir = config?.workingDir || process.cwd(); + super({ + 
...createBaseBuilderConfig({ + workingDir, + dirs: ['src/pages', 'src/workflows'], + }), + buildTarget: 'vercel-build-output-api', + debugFilePrefix: '_', + }); + } + + override async build(): Promise { + const configPath = join( + this.config.workingDir, + '.vercel/output/config.json' + ); + + // The config output by astro + const config = JSON.parse(await readFile(configPath, 'utf-8')); + + // Filter out existing workflow routes (wrong `dest` mapping) + config.routes = config.routes.filter( + (route: { src?: string; dest: string }) => + !route.src?.includes('.well-known/workflow') + ); + + // Find the index right after the "filesystem" handler and "continue: true" routes + let insertIndex = config.routes.findIndex( + (route: any) => route.handle === 'filesystem' + ); + + // Move past any routes with "continue: true" (like _astro cache headers) + while ( + insertIndex < config.routes.length - 1 && + config.routes[insertIndex + 1]?.continue === true + ) { + insertIndex++; + } + + // Insert workflow routes right after + config.routes.splice(insertIndex + 1, 0, ...WORKFLOW_ROUTES); + + // Bundles workflows for vercel + await super.build(); + + // Use old astro config with updated routes + await writeFile(configPath, JSON.stringify(config, null, 2)); + } +} diff --git a/packages/astro/src/index.ts b/packages/astro/src/index.ts new file mode 100644 index 000000000..311a173ef --- /dev/null +++ b/packages/astro/src/index.ts @@ -0,0 +1 @@ +export { workflowPlugin as workflow } from './plugin.js'; diff --git a/packages/astro/src/plugin.ts b/packages/astro/src/plugin.ts new file mode 100644 index 000000000..ad40ec5fd --- /dev/null +++ b/packages/astro/src/plugin.ts @@ -0,0 +1,83 @@ +import type { AstroIntegration, HookParameters } from 'astro'; +import { LocalBuilder, VercelBuilder } from './builder.js'; +import { workflowTransformPlugin } from '@workflow/rollup'; +import { createBuildQueue } from '@workflow/builders'; + +export function workflowPlugin(): AstroIntegration { 
+ const builder = new LocalBuilder(); + const enqueue = createBuildQueue(); + + return { + name: 'workflow:astro', + hooks: { + 'astro:config:setup': async ({ + updateConfig, + }: HookParameters<'astro:config:setup'>) => { + // Use local builder + if (!process.env.VERCEL_DEPLOYMENT_ID) { + try { + await builder.build(); + } catch (buildError) { + // Build might fail due to invalid workflow files or missing dependencies + // Log the error and rethrow to properly propagate to Astro + console.error('Build failed during config setup:', buildError); + throw buildError; + } + } + updateConfig({ + vite: { + plugins: [ + workflowTransformPlugin(), + { + name: 'workflow:vite', + + // TODO: Move this to @workflow/vite or something since this is vite specific + async hotUpdate(options) { + const { file, read } = options; + + // Check if this is a TS/JS file that might contain workflow directives + const jsTsRegex = /\.(ts|tsx|js|jsx|mjs|cjs)$/; + if (!jsTsRegex.test(file)) { + return; + } + + // Read the file to check for workflow/step directives + let content: string; + try { + content = await read(); + } catch { + // File might have been deleted - trigger rebuild to update generated routes + console.log('Workflow file changed, rebuilding...'); + await enqueue(() => builder.build()); + return; + } + + const useWorkflowPattern = /^\s*(['"])use workflow\1;?\s*$/m; + const useStepPattern = /^\s*(['"])use step\1;?\s*$/m; + + if ( + !useWorkflowPattern.test(content) && + !useStepPattern.test(content) + ) { + return; + } + + console.log('Workflow file changed, rebuilding...'); + await enqueue(() => builder.build()); + // Let Vite handle the normal HMR for the changed file + return; + }, + }, + ], + }, + }); + }, + 'astro:build:done': async () => { + if (process.env.VERCEL_DEPLOYMENT_ID) { + const vercelBuilder = new VercelBuilder(); + await vercelBuilder.build(); + } + }, + }, + }; +} diff --git a/packages/astro/tsconfig.json b/packages/astro/tsconfig.json new file mode 100644 
index 000000000..ba5d9aec0 --- /dev/null +++ b/packages/astro/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "@workflow/tsconfig/base.json", + "compilerOptions": { + "outDir": "dist", + "target": "es2022", + "module": "preserve", + "baseUrl": ".", + "moduleResolution": "bundler" + }, + "include": ["src"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/packages/builders/CHANGELOG.md b/packages/builders/CHANGELOG.md index 3087d8377..e124fa1f7 100644 --- a/packages/builders/CHANGELOG.md +++ b/packages/builders/CHANGELOG.md @@ -1,5 +1,30 @@ # @workflow/builders +## 4.0.1-beta.20 + +### Patch Changes + +- Updated dependencies [2f0840b] +- Updated dependencies [555d7a6] + - @workflow/core@4.0.1-beta.21 + - @workflow/swc-plugin@4.0.1-beta.10 + +## 4.0.1-beta.19 + +### Patch Changes + +- d53bf90: Fix StandaloneBuilder to scan all directories for workflows +- 3c19e90: Fix Nitro and SvelteKit build race conditions and make writing debug file atomic +- 1ac5592: Add @workflow/astro package +- Updated dependencies [0f1645b] +- Updated dependencies [5b91861] +- Updated dependencies [bdde1bd] +- Updated dependencies [0cacb99] +- Updated dependencies [8d4562e] + - @workflow/core@4.0.1-beta.20 + - @workflow/swc-plugin@4.0.1-beta.9 + - @workflow/errors@4.0.1-beta.7 + ## 4.0.1-beta.18 ### Patch Changes diff --git a/packages/builders/package.json b/packages/builders/package.json index 3e34ffcb6..880e254f4 100644 --- a/packages/builders/package.json +++ b/packages/builders/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/builders", - "version": "4.0.1-beta.18", + "version": "4.0.1-beta.20", "description": "Shared builder infrastructure for Workflow DevKit", "type": "module", "main": "./dist/index.js", diff --git a/packages/builders/src/base-builder.ts b/packages/builders/src/base-builder.ts index 30fe493bc..0d57017b6 100644 --- a/packages/builders/src/base-builder.ts +++ b/packages/builders/src/base-builder.ts @@ -1,6 +1,6 @@ import { randomUUID } from 'node:crypto'; 
import { mkdir, readFile, rename, writeFile } from 'node:fs/promises'; -import { dirname, join, relative, resolve } from 'node:path'; +import { basename, dirname, join, relative, resolve } from 'node:path'; import { promisify } from 'node:util'; import chalk from 'chalk'; import { parse } from 'comment-json'; @@ -185,7 +185,8 @@ export abstract class BaseBuilder { debugData: object, merge?: boolean ): Promise { - const targetPath = `${outfile}.debug.json`; + const prefix = this.config.debugFilePrefix || ''; + const targetPath = `${dirname(outfile)}/${prefix}${basename(outfile)}.debug.json`; let existing = {}; try { diff --git a/packages/builders/src/index.ts b/packages/builders/src/index.ts index 0fbbf5cb9..27d3fe1b5 100644 --- a/packages/builders/src/index.ts +++ b/packages/builders/src/index.ts @@ -8,6 +8,7 @@ export { createNodeModuleErrorPlugin } from './node-module-esbuild-plugin.js'; export { StandaloneBuilder } from './standalone.js'; export { createSwcPlugin } from './swc-esbuild-plugin.js'; export type { + AstroConfig, BuildTarget, NextConfig, StandaloneConfig, diff --git a/packages/builders/src/types.ts b/packages/builders/src/types.ts index 5c514142f..87c0d5428 100644 --- a/packages/builders/src/types.ts +++ b/packages/builders/src/types.ts @@ -3,6 +3,7 @@ export const validBuildTargets = [ 'vercel-build-output-api', 'next', 'sveltekit', + 'astro', ] as const; export type BuildTarget = (typeof validBuildTargets)[number]; @@ -22,6 +23,9 @@ interface BaseWorkflowConfig { externalPackages?: string[]; workflowManifestPath?: string; + + // Optional prefix for debug files (e.g., "_" for Astro to ignore them) + debugFilePrefix?: string; } /** @@ -66,6 +70,17 @@ export interface SvelteKitConfig extends BaseWorkflowConfig { webhookBundlePath: string; } +/** + * Configuration for Astro builds. 
+ */ +export interface AstroConfig extends BaseWorkflowConfig { + buildTarget: 'astro'; + // Astro builder computes paths dynamically, so these are not used + stepsBundlePath: string; + workflowsBundlePath: string; + webhookBundlePath: string; +} + /** * Discriminated union of all builder configuration types. */ @@ -73,7 +88,8 @@ export type WorkflowConfig = | StandaloneConfig | VercelBuildOutputConfig | NextConfig - | SvelteKitConfig; + | SvelteKitConfig + | AstroConfig; export function isValidBuildTarget( target: string | undefined diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index a3717c94d..8369c6fea 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,45 @@ # @workflow/cli +## 4.0.1-beta.24 + +### Patch Changes + +- Updated dependencies [2f0840b] +- Updated dependencies [555d7a6] +- Updated dependencies [e9494d5] + - @workflow/core@4.0.1-beta.21 + - @workflow/swc-plugin@4.0.1-beta.10 + - @workflow/world-vercel@4.0.1-beta.15 + - @workflow/builders@4.0.1-beta.20 + - @workflow/web@4.0.1-beta.12 + +## 4.0.1-beta.23 + +### Patch Changes + +- 8d4562e: Rename leftover references to "embedded world" to be "local world" +- Updated dependencies [d53bf90] +- Updated dependencies [0f1645b] +- Updated dependencies [3c19e90] +- Updated dependencies [1ac5592] +- Updated dependencies [6e8e828] +- Updated dependencies [5b91861] +- Updated dependencies [10c5b91] +- Updated dependencies [bdde1bd] +- Updated dependencies [109fe59] +- Updated dependencies [0cacb99] +- Updated dependencies [2faddf3] +- Updated dependencies [10c5b91] +- Updated dependencies [8d4562e] + - @workflow/builders@4.0.1-beta.19 + - @workflow/core@4.0.1-beta.20 + - @workflow/world-local@4.0.1-beta.14 + - @workflow/swc-plugin@4.0.1-beta.9 + - @workflow/world@4.0.1-beta.8 + - @workflow/web@4.0.1-beta.12 + - @workflow/errors@4.0.1-beta.7 + - @workflow/world-vercel@4.0.1-beta.14 + ## 4.0.1-beta.22 ### Patch Changes @@ -284,7 +324,7 @@ ### Patch Changes - 
66332f2: Rename vercel-static builder to standalone -- dbf2207: Fix --backend flag not finding world when using embedded world package name explicitly +- dbf2207: Fix --backend flag not finding world when using local world package name explicitly - Updated dependencies [dbf2207] - Updated dependencies [eadf588] - @workflow/web@4.0.1-beta.4 diff --git a/packages/cli/package.json b/packages/cli/package.json index 68a4d8893..50049e584 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/cli", - "version": "4.0.1-beta.22", + "version": "4.0.1-beta.24", "description": "Command-line interface for Workflow DevKit", "type": "module", "bin": { diff --git a/packages/cli/src/lib/inspect/env.ts b/packages/cli/src/lib/inspect/env.ts index fccb83b90..89fb80c44 100644 --- a/packages/cli/src/lib/inspect/env.ts +++ b/packages/cli/src/lib/inspect/env.ts @@ -43,7 +43,7 @@ export const getEnvVars = (): Record => { WORKFLOW_VERCEL_PROXY_URL: env.WORKFLOW_VERCEL_PROXY_URL || '', WORKFLOW_LOCAL_UI: env.WORKFLOW_LOCAL_UI || '', PORT: env.PORT || '', - WORKFLOW_EMBEDDED_DATA_DIR: env.WORKFLOW_EMBEDDED_DATA_DIR || '', + WORKFLOW_LOCAL_DATA_DIR: env.WORKFLOW_LOCAL_DATA_DIR || '', }; }; @@ -75,10 +75,10 @@ async function findWorkflowDataDir(cwd: string) { } /** - * Overwrites process.env variables related to embedded world configuration, + * Overwrites process.env variables related to local world configuration, * if relevant environment variables aren't set already. 
*/ -export const inferEmbeddedWorldEnvVars = async () => { +export const inferLocalWorldEnvVars = async () => { const envVars = getEnvVars(); if (!envVars.PORT) { logger.warn( @@ -89,12 +89,12 @@ export const inferEmbeddedWorldEnvVars = async () => { } // Paths to check, in order of preference - if (!envVars.WORKFLOW_EMBEDDED_DATA_DIR) { + if (!envVars.WORKFLOW_LOCAL_DATA_DIR) { const cwd = getWorkflowConfig().workingDir; const localPath = await findWorkflowDataDir(cwd); if (localPath) { - envVars.WORKFLOW_EMBEDDED_DATA_DIR = localPath; + envVars.WORKFLOW_LOCAL_DATA_DIR = localPath; writeEnvVars(envVars); return; } @@ -104,7 +104,7 @@ export const inferEmbeddedWorldEnvVars = async () => { if (repoRoot) { const repoPath = await findWorkflowDataDir(repoRoot); if (repoPath) { - envVars.WORKFLOW_EMBEDDED_DATA_DIR = repoPath; + envVars.WORKFLOW_LOCAL_DATA_DIR = repoPath; writeEnvVars(envVars); return; } diff --git a/packages/cli/src/lib/inspect/flags.ts b/packages/cli/src/lib/inspect/flags.ts index ec9289d44..0d84d28ac 100644 --- a/packages/cli/src/lib/inspect/flags.ts +++ b/packages/cli/src/lib/inspect/flags.ts @@ -30,12 +30,12 @@ export const cliFlags = { description: 'backend to inspect', required: false, char: 'b', - default: 'embedded', + default: 'local', env: 'WORKFLOW_TARGET_WORLD', helpGroup: 'Target', helpLabel: '-b, --backend', - helpValue: ['embedded', 'vercel', ''], - defaultHelp: 'embedded', + helpValue: ['local', 'vercel', ''], + defaultHelp: 'local', }), authToken: Flags.string({ description: diff --git a/packages/cli/src/lib/inspect/setup.ts b/packages/cli/src/lib/inspect/setup.ts index b88f6ca04..6db7cf044 100644 --- a/packages/cli/src/lib/inspect/setup.ts +++ b/packages/cli/src/lib/inspect/setup.ts @@ -3,7 +3,7 @@ import chalk from 'chalk'; import terminalLink from 'terminal-link'; import { logger, setJsonMode, setVerboseMode } from '../config/log.js'; import { - inferEmbeddedWorldEnvVars, + inferLocalWorldEnvVars, inferVercelEnvVars, writeEnvVars, } 
from './env.js'; @@ -52,10 +52,10 @@ export const setupCliWorld = async ( ) { await inferVercelEnvVars(); } else if ( - flags.backend === 'embedded' || + flags.backend === 'local' || flags.backend === '@workflow/world-local' ) { - await inferEmbeddedWorldEnvVars(); + await inferLocalWorldEnvVars(); } logger.debug('Initializing world'); diff --git a/packages/cli/src/lib/inspect/web.ts b/packages/cli/src/lib/inspect/web.ts index 2ffc6a515..037d86c05 100644 --- a/packages/cli/src/lib/inspect/web.ts +++ b/packages/cli/src/lib/inspect/web.ts @@ -369,7 +369,7 @@ function envToQueryParams( WORKFLOW_VERCEL_PROJECT: 'project', WORKFLOW_VERCEL_TEAM: 'team', PORT: 'port', - WORKFLOW_EMBEDDED_DATA_DIR: 'dataDir', + WORKFLOW_LOCAL_DATA_DIR: 'dataDir', }; for (const [envName, paramName] of Object.entries(envToQueryParamMappings)) { diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 2d11697f7..2f095e7e3 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -1,5 +1,34 @@ # @workflow/core +## 4.0.1-beta.21 + +### Patch Changes + +- 2f0840b: Better error when passing an invalid workflow value to `start()` +- Updated dependencies [e9494d5] + - @workflow/world-vercel@4.0.1-beta.15 + +## 4.0.1-beta.20 + +### Patch Changes + +- 0f1645b: Ignore rejections in `waitedUntil` promise +- bdde1bd: track queue overhead with opentelemetry +- 8d4562e: Rename leftover references to "embedded world" to be "local world" +- Updated dependencies [bc9b628] +- Updated dependencies [34f3f86] +- Updated dependencies [cd451e0] +- Updated dependencies [6e8e828] +- Updated dependencies [10c5b91] +- Updated dependencies [bdde1bd] +- Updated dependencies [2faddf3] +- Updated dependencies [8d4562e] + - @workflow/utils@4.0.1-beta.5 + - @workflow/world-local@4.0.1-beta.14 + - @workflow/world@4.0.1-beta.8 + - @workflow/errors@4.0.1-beta.7 + - @workflow/world-vercel@4.0.1-beta.14 + ## 4.0.1-beta.19 ### Patch Changes diff --git a/packages/core/e2e/e2e.test.ts 
b/packages/core/e2e/e2e.test.ts index 92336b08e..0fd1bfeaf 100644 --- a/packages/core/e2e/e2e.test.ts +++ b/packages/core/e2e/e2e.test.ts @@ -600,7 +600,8 @@ describe('e2e', () => { // TODO: Investigate esbuild source map generation for bundled modules const isViteBasedFrameworkDevMode = (process.env.APP_NAME === 'sveltekit' || - process.env.APP_NAME === 'vite') && + process.env.APP_NAME === 'vite' || + process.env.APP_NAME === 'astro') && isLocalDeployment(); if (!isViteBasedFrameworkDevMode) { @@ -776,4 +777,49 @@ describe('e2e', () => { expect(returnValue).toBe('Result: 21'); } ); + + test( + 'spawnWorkflowFromStepWorkflow - spawning a child workflow using start() inside a step', + { timeout: 120_000 }, + async () => { + // This workflow spawns another workflow using start() inside a step function + // This is the recommended pattern for spawning workflows from within workflows + const inputValue = 42; + const run = await triggerWorkflow('spawnWorkflowFromStepWorkflow', [ + inputValue, + ]); + const returnValue = await getWorkflowReturnValue(run.runId); + + // Verify the parent workflow completed + expect(returnValue).toHaveProperty('parentInput'); + expect(returnValue.parentInput).toBe(inputValue); + + // Verify the child workflow was spawned + expect(returnValue).toHaveProperty('childRunId'); + expect(typeof returnValue.childRunId).toBe('string'); + expect(returnValue.childRunId.startsWith('wrun_')).toBe(true); + + // Verify the child workflow completed and returned the expected result + expect(returnValue).toHaveProperty('childResult'); + expect(returnValue.childResult).toEqual({ + childResult: inputValue * 2, // doubleValue(42) = 84 + originalValue: inputValue, + }); + + // Verify both runs completed successfully via CLI + const { json: parentRunData } = await cliInspectJson( + `runs ${run.runId} --withData` + ); + expect(parentRunData.status).toBe('completed'); + + const { json: childRunData } = await cliInspectJson( + `runs ${returnValue.childRunId} 
--withData` + ); + expect(childRunData.status).toBe('completed'); + expect(childRunData.output).toEqual({ + childResult: inputValue * 2, + originalValue: inputValue, + }); + } + ); }); diff --git a/packages/core/e2e/local-build.test.ts b/packages/core/e2e/local-build.test.ts index 0eb264c9b..b8f01e858 100644 --- a/packages/core/e2e/local-build.test.ts +++ b/packages/core/e2e/local-build.test.ts @@ -15,6 +15,7 @@ describe.each([ 'hono', 'express', 'fastify', + 'astro', ])('e2e', (project) => { test('builds without errors', { timeout: 180_000 }, async () => { // skip if we're targeting specific app to test diff --git a/packages/core/package.json b/packages/core/package.json index a8ba0228f..9b3a0c9d8 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/core", - "version": "4.0.1-beta.19", + "version": "4.0.1-beta.21", "description": "Core runtime and engine for Workflow DevKit", "type": "module", "main": "dist/index.js", @@ -41,7 +41,7 @@ "build": "tsc", "dev": "tsc --watch", "clean": "tsc --build --clean && rm -r dist ||:", - "test": "cross-env WORKFLOW_TARGET_WORLD=embedded vitest run src", + "test": "cross-env WORKFLOW_TARGET_WORLD=local vitest run src", "test:e2e": "vitest run e2e", "typecheck": "tsc --noEmit" }, diff --git a/packages/core/src/observability.ts b/packages/core/src/observability.ts index 1a7e7b3fb..e4acbfac0 100644 --- a/packages/core/src/observability.ts +++ b/packages/core/src/observability.ts @@ -30,6 +30,18 @@ const streamToStreamId = (value: any): string => { return `${STREAM_ID_PREFIX}null`; }; +const serializedStepFunctionToString = (value: unknown): string => { + if (!value) return 'null'; + if (typeof value !== 'object') return 'null'; + if ('stepId' in value) { + const stepId = value.stepId; + // TODO: Add closure vars to the string representation. 
+ // value.closureVars + return ``; + } + return ''; +}; + /** * This is an extra reviver for devalue that takes any streams that would be converted, * into actual streams, and instead formats them as string links for printing in CLI output. @@ -41,7 +53,7 @@ const streamPrintRevivers: Record any> = { ReadableStream: streamToStreamId, WritableStream: streamToStreamId, TransformStream: streamToStreamId, - StepFunction: (value) => ``, + StepFunction: serializedStepFunctionToString, }; const hydrateStepIO = < diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index edf5e8a8e..849c77bb5 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -9,24 +9,20 @@ import { WorkflowRuntimeError, } from '@workflow/errors'; import { getPort } from '@workflow/utils/get-port'; -import type { - Event, - WorkflowRun, - WorkflowRunStatus, - World, +import { + type Event, + StepInvokePayloadSchema, + WorkflowInvokePayloadSchema, + type WorkflowRun, + type WorkflowRunStatus, + type World, } from '@workflow/world'; import { WorkflowSuspension } from './global.js'; import { runtimeLogger } from './logger.js'; import { parseWorkflowName } from './parse-name.js'; import { getStepFunction } from './private.js'; import { getWorld, getWorldHandlers } from './runtime/world.js'; -import { - type Serializable, - type StepInvokePayload, - StepInvokePayloadSchema, - type WorkflowInvokePayload, - WorkflowInvokePayloadSchema, -} from './schemas.js'; +import type { Serializable } from './schemas.js'; import { dehydrateStepArguments, dehydrateStepReturnValue, @@ -38,7 +34,13 @@ import { remapErrorStack } from './source-map.js'; // TODO: move step handler out to a separate file import { contextStorage } from './step/context-storage.js'; import * as Attribute from './telemetry/semantic-conventions.js'; -import { serializeTraceCarrier, trace, withTraceContext } from './telemetry.js'; +import { + getSpanKind, + linkToCurrentContext, + serializeTraceCarrier, + 
trace, + withTraceContext, +} from './telemetry.js'; import { getErrorName, getErrorStack } from './types.js'; import { buildWorkflowSuspensionMessage, @@ -271,297 +273,313 @@ export function workflowEntrypoint(workflowCode: string) { return getWorldHandlers().createQueueHandler( '__wkf_workflow_', async (message_, metadata) => { - const { runId, traceCarrier: traceContext } = - WorkflowInvokePayloadSchema.parse(message_); + const { + runId, + traceCarrier: traceContext, + requestedAt, + } = WorkflowInvokePayloadSchema.parse(message_); // Extract the workflow name from the topic name const workflowName = metadata.queueName.slice('__wkf_workflow_'.length); + const spanLinks = await linkToCurrentContext(); // Invoke user workflow within the propagated trace context return await withTraceContext(traceContext, async () => { const world = getWorld(); - return trace(`WORKFLOW ${workflowName}`, async (span) => { - span?.setAttributes({ - ...Attribute.WorkflowName(workflowName), - ...Attribute.WorkflowOperation('execute'), - ...Attribute.QueueName(metadata.queueName), - }); - - // TODO: validate `workflowName` exists before consuming message? - - span?.setAttributes({ - ...Attribute.WorkflowRunId(runId), - ...Attribute.WorkflowTracePropagated(!!traceContext), - }); - - let workflowStartedAt = -1; - try { - let workflowRun = await world.runs.get(runId); - - if (workflowRun.status === 'pending') { - workflowRun = await world.runs.update(runId, { - // This sets the `startedAt` timestamp at the database level - status: 'running', - }); - } - - // At this point, the workflow is "running" and `startedAt` should - // definitely be set. 
- if (!workflowRun.startedAt) { - throw new Error( - `Workflow run "${runId}" has no "startedAt" timestamp` - ); - } - workflowStartedAt = +workflowRun.startedAt; - + return trace( + `WORKFLOW ${workflowName}`, + { links: spanLinks }, + async (span) => { span?.setAttributes({ - ...Attribute.WorkflowRunStatus(workflowRun.status), - ...Attribute.WorkflowStartedAt(workflowStartedAt), + ...Attribute.WorkflowName(workflowName), + ...Attribute.WorkflowOperation('execute'), + ...Attribute.QueueName(metadata.queueName), + ...Attribute.QueueMessageId(metadata.messageId), + ...getQueueOverhead({ requestedAt }), }); - if (workflowRun.status !== 'running') { - // Workflow has already completed or failed, so we can skip it - console.warn( - `Workflow "${runId}" has status "${workflowRun.status}", skipping` - ); + // TODO: validate `workflowName` exists before consuming message? - // TODO: for `cancel`, we actually want to propagate a WorkflowCancelled event - // inside the workflow context so the user can gracefully exit. this is SIGTERM - // TODO: furthermore, there should be a timeout or a way to force cancel SIGKILL - // so that we actually exit here without replaying the workflow at all, in the case - // the replaying the workflow is itself failing. 
+ span?.setAttributes({ + ...Attribute.WorkflowRunId(runId), + ...Attribute.WorkflowTracePropagated(!!traceContext), + }); - return; - } + let workflowStartedAt = -1; + try { + let workflowRun = await world.runs.get(runId); - // Load all events into memory before running - const events = await getAllWorkflowRunEvents(workflowRun.runId); + if (workflowRun.status === 'pending') { + workflowRun = await world.runs.update(runId, { + // This sets the `startedAt` timestamp at the database level + status: 'running', + }); + } - // Check for any elapsed waits and create wait_completed events - const now = Date.now(); - for (const event of events) { - if (event.eventType === 'wait_created') { - const resumeAt = event.eventData.resumeAt as Date; - const hasCompleted = events.some( - (e) => - e.eventType === 'wait_completed' && - e.correlationId === event.correlationId + // At this point, the workflow is "running" and `startedAt` should + // definitely be set. + if (!workflowRun.startedAt) { + throw new Error( + `Workflow run "${runId}" has no "startedAt" timestamp` ); - - // If wait has elapsed and hasn't been completed yet - if (!hasCompleted && now >= resumeAt.getTime()) { - const completedEvent = await world.events.create(runId, { - eventType: 'wait_completed', - correlationId: event.correlationId, - }); - // Add the event to the events array so the workflow can see it - events.push(completedEvent); - } } - } + workflowStartedAt = +workflowRun.startedAt; + + span?.setAttributes({ + ...Attribute.WorkflowRunStatus(workflowRun.status), + ...Attribute.WorkflowStartedAt(workflowStartedAt), + }); - const result = await runWorkflow(workflowCode, workflowRun, events); + if (workflowRun.status !== 'running') { + // Workflow has already completed or failed, so we can skip it + console.warn( + `Workflow "${runId}" has status "${workflowRun.status}", skipping` + ); - // Update the workflow run with the result - await world.runs.update(runId, { - status: 'completed', - output: result 
as Serializable, - }); + // TODO: for `cancel`, we actually want to propagate a WorkflowCancelled event + // inside the workflow context so the user can gracefully exit. this is SIGTERM + // TODO: furthermore, there should be a timeout or a way to force cancel SIGKILL + // so that we actually exit here without replaying the workflow at all, in the case + // the replaying the workflow is itself failing. - span?.setAttributes({ - ...Attribute.WorkflowRunStatus('completed'), - ...Attribute.WorkflowEventsCount(events.length), - }); - } catch (err) { - if (WorkflowSuspension.is(err)) { - const suspensionMessage = buildWorkflowSuspensionMessage( - runId, - err.stepCount, - err.hookCount, - err.waitCount - ); - if (suspensionMessage) { - // Note: suspensionMessage logged only in debug mode to avoid production noise - // console.debug(suspensionMessage); + return; } - // Process each operation in the queue (steps and hooks) - let minTimeoutSeconds: number | null = null; - for (const queueItem of err.steps) { - if (queueItem.type === 'step') { - // Handle step operations - const ops: Promise[] = []; - const dehydratedInput = dehydrateStepArguments( - { - args: queueItem.args, - closureVars: queueItem.closureVars, - }, - err.globalThis + + // Load all events into memory before running + const events = await getAllWorkflowRunEvents(workflowRun.runId); + + // Check for any elapsed waits and create wait_completed events + const now = Date.now(); + for (const event of events) { + if (event.eventType === 'wait_created') { + const resumeAt = event.eventData.resumeAt as Date; + const hasCompleted = events.some( + (e) => + e.eventType === 'wait_completed' && + e.correlationId === event.correlationId ); - try { - const step = await world.steps.create(runId, { - stepId: queueItem.correlationId, - stepName: queueItem.stepName, - input: dehydratedInput as Serializable, + // If wait has elapsed and hasn't been completed yet + if (!hasCompleted && now >= resumeAt.getTime()) { + const 
completedEvent = await world.events.create(runId, { + eventType: 'wait_completed', + correlationId: event.correlationId, }); + // Add the event to the events array so the workflow can see it + events.push(completedEvent); + } + } + } - waitUntil( - Promise.all(ops).catch((err) => { - // Ignore expected client disconnect errors (e.g., browser refresh during streaming) - const isAbortError = - err?.name === 'AbortError' || - err?.name === 'ResponseAborted'; - if (!isAbortError) throw err; - }) - ); + const result = await runWorkflow( + workflowCode, + workflowRun, + events + ); - await world.queue( - `__wkf_step_${queueItem.stepName}`, - { - workflowName, - workflowRunId: runId, - workflowStartedAt, - stepId: step.stepId, - traceCarrier: await serializeTraceCarrier(), - } satisfies StepInvokePayload, + // Update the workflow run with the result + await world.runs.update(runId, { + status: 'completed', + output: result as Serializable, + }); + + span?.setAttributes({ + ...Attribute.WorkflowRunStatus('completed'), + ...Attribute.WorkflowEventsCount(events.length), + }); + } catch (err) { + if (WorkflowSuspension.is(err)) { + const suspensionMessage = buildWorkflowSuspensionMessage( + runId, + err.stepCount, + err.hookCount, + err.waitCount + ); + if (suspensionMessage) { + // Note: suspensionMessage logged only in debug mode to avoid production noise + // console.debug(suspensionMessage); + } + // Process each operation in the queue (steps and hooks) + let minTimeoutSeconds: number | null = null; + for (const queueItem of err.steps) { + if (queueItem.type === 'step') { + // Handle step operations + const ops: Promise[] = []; + const dehydratedInput = dehydrateStepArguments( { - idempotencyKey: queueItem.correlationId, - } + args: queueItem.args, + closureVars: queueItem.closureVars, + }, + err.globalThis ); - } catch (err) { - if (WorkflowAPIError.is(err) && err.status === 409) { - // Step already exists, so we can skip it - console.warn( - `Step 
"${queueItem.stepName}" with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` + + try { + const step = await world.steps.create(runId, { + stepId: queueItem.correlationId, + stepName: queueItem.stepName, + input: dehydratedInput as Serializable, + }); + + waitUntil( + Promise.all(ops).catch((err) => { + // Ignore expected client disconnect errors (e.g., browser refresh during streaming) + const isAbortError = + err?.name === 'AbortError' || + err?.name === 'ResponseAborted'; + if (!isAbortError) throw err; + }) ); - continue; - } - throw err; - } - } else if (queueItem.type === 'hook') { - // Handle hook operations - try { - // Create hook in database - const hookMetadata = - typeof queueItem.metadata === 'undefined' - ? undefined - : dehydrateStepArguments( - queueItem.metadata, - err.globalThis - ); - await world.hooks.create(runId, { - hookId: queueItem.correlationId, - token: queueItem.token, - metadata: hookMetadata, - }); - // Create hook_created event in event log - await world.events.create(runId, { - eventType: 'hook_created', - correlationId: queueItem.correlationId, - }); - } catch (err) { - if (WorkflowAPIError.is(err)) { - if (err.status === 409) { - // Hook already exists (duplicate hook_id constraint), so we can skip it - console.warn( - `Hook with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` - ); - continue; - } else if (err.status === 410) { - // Workflow has already completed, so no-op + await queueMessage( + world, + `__wkf_step_${queueItem.stepName}`, + { + workflowName, + workflowRunId: runId, + workflowStartedAt, + stepId: step.stepId, + traceCarrier: await serializeTraceCarrier(), + requestedAt: new Date(), + }, + { + idempotencyKey: queueItem.correlationId, + } + ); + } catch (err) { + if (WorkflowAPIError.is(err) && err.status === 409) { + // Step already exists, so we can skip it console.warn( - `Workflow run "${runId}" has already completed, skipping hook 
"${queueItem.correlationId}": ${err.message}` + `Step "${queueItem.stepName}" with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` ); continue; } + throw err; } - throw err; - } - } else if (queueItem.type === 'wait') { - // Handle wait operations - try { - // Only create wait_created event if it hasn't been created yet - if (!queueItem.hasCreatedEvent) { + } else if (queueItem.type === 'hook') { + // Handle hook operations + try { + // Create hook in database + const hookMetadata = + typeof queueItem.metadata === 'undefined' + ? undefined + : dehydrateStepArguments( + queueItem.metadata, + err.globalThis + ); + await world.hooks.create(runId, { + hookId: queueItem.correlationId, + token: queueItem.token, + metadata: hookMetadata, + }); + + // Create hook_created event in event log await world.events.create(runId, { - eventType: 'wait_created', + eventType: 'hook_created', correlationId: queueItem.correlationId, - eventData: { - resumeAt: queueItem.resumeAt, - }, }); + } catch (err) { + if (WorkflowAPIError.is(err)) { + if (err.status === 409) { + // Hook already exists (duplicate hook_id constraint), so we can skip it + console.warn( + `Hook with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` + ); + continue; + } else if (err.status === 410) { + // Workflow has already completed, so no-op + console.warn( + `Workflow run "${runId}" has already completed, skipping hook "${queueItem.correlationId}": ${err.message}` + ); + continue; + } + } + throw err; } + } else if (queueItem.type === 'wait') { + // Handle wait operations + try { + // Only create wait_created event if it hasn't been created yet + if (!queueItem.hasCreatedEvent) { + await world.events.create(runId, { + eventType: 'wait_created', + correlationId: queueItem.correlationId, + eventData: { + resumeAt: queueItem.resumeAt, + }, + }); + } - // Calculate how long to wait before resuming - const now = Date.now(); - const resumeAtMs = 
queueItem.resumeAt.getTime(); - const delayMs = Math.max(1000, resumeAtMs - now); - const timeoutSeconds = Math.ceil(delayMs / 1000); - - // Track the minimum timeout across all waits - if ( - minTimeoutSeconds === null || - timeoutSeconds < minTimeoutSeconds - ) { - minTimeoutSeconds = timeoutSeconds; - } - } catch (err) { - if (WorkflowAPIError.is(err) && err.status === 409) { - // Wait already exists, so we can skip it - console.warn( - `Wait with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` - ); - continue; + // Calculate how long to wait before resuming + const now = Date.now(); + const resumeAtMs = queueItem.resumeAt.getTime(); + const delayMs = Math.max(1000, resumeAtMs - now); + const timeoutSeconds = Math.ceil(delayMs / 1000); + + // Track the minimum timeout across all waits + if ( + minTimeoutSeconds === null || + timeoutSeconds < minTimeoutSeconds + ) { + minTimeoutSeconds = timeoutSeconds; + } + } catch (err) { + if (WorkflowAPIError.is(err) && err.status === 409) { + // Wait already exists, so we can skip it + console.warn( + `Wait with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err.message}` + ); + continue; + } + throw err; } - throw err; } } - } - span?.setAttributes({ - ...Attribute.WorkflowRunStatus('pending_steps'), - ...Attribute.WorkflowStepsCreated(err.steps.length), - }); + span?.setAttributes({ + ...Attribute.WorkflowRunStatus('pending_steps'), + ...Attribute.WorkflowStepsCreated(err.steps.length), + }); - // If we encountered any waits, return the minimum timeout - if (minTimeoutSeconds !== null) { - return { timeoutSeconds: minTimeoutSeconds }; - } - } else { - const errorName = getErrorName(err); - const errorMessage = - err instanceof Error ? 
err.message : String(err); - let errorStack = getErrorStack(err); - - // Remap error stack using source maps to show original source locations - if (errorStack) { - const parsedName = parseWorkflowName(workflowName); - const filename = parsedName?.path || workflowName; - errorStack = remapErrorStack( - errorStack, - filename, - workflowCode + // If we encountered any waits, return the minimum timeout + if (minTimeoutSeconds !== null) { + return { timeoutSeconds: minTimeoutSeconds }; + } + } else { + const errorName = getErrorName(err); + const errorMessage = + err instanceof Error ? err.message : String(err); + let errorStack = getErrorStack(err); + + // Remap error stack using source maps to show original source locations + if (errorStack) { + const parsedName = parseWorkflowName(workflowName); + const filename = parsedName?.path || workflowName; + errorStack = remapErrorStack( + errorStack, + filename, + workflowCode + ); + } + + console.error( + `${errorName} while running "${runId}" workflow:\n\n${errorStack}` ); + await world.runs.update(runId, { + status: 'failed', + error: { + message: errorMessage, + stack: errorStack, + // TODO: include error codes when we define them + }, + }); + span?.setAttributes({ + ...Attribute.WorkflowRunStatus('failed'), + ...Attribute.WorkflowErrorName(errorName), + ...Attribute.WorkflowErrorMessage(String(err)), + }); } - - console.error( - `${errorName} while running "${runId}" workflow:\n\n${errorStack}` - ); - await world.runs.update(runId, { - status: 'failed', - error: { - message: errorMessage, - stack: errorStack, - // TODO: include error codes when we define them - }, - }); - span?.setAttributes({ - ...Attribute.WorkflowRunStatus('failed'), - ...Attribute.WorkflowErrorName(errorName), - ...Attribute.WorkflowErrorMessage(String(err)), - }); } } - }); // End withTraceContext + ); // End withTraceContext }); } ); @@ -582,7 +600,9 @@ export const stepEntrypoint = workflowStartedAt, stepId, traceCarrier: traceContext, + 
requestedAt, } = StepInvokePayloadSchema.parse(message_); + const spanLinks = await linkToCurrentContext(); // Execute step within the propagated trace context return await withTraceContext(traceContext, async () => { // Extract the step name from the topic name @@ -592,233 +612,201 @@ export const stepEntrypoint = // Get the port early to avoid async operations during step execution const port = await getPort(); - return trace(`STEP ${stepName}`, async (span) => { - span?.setAttributes({ - ...Attribute.StepName(stepName), - ...Attribute.StepAttempt(metadata.attempt), - ...Attribute.QueueName(metadata.queueName), - }); + return trace( + `STEP ${stepName}`, + { kind: await getSpanKind('CONSUMER'), links: spanLinks }, + async (span) => { + span?.setAttributes({ + ...Attribute.StepName(stepName), + ...Attribute.StepAttempt(metadata.attempt), + ...Attribute.QueueName(metadata.queueName), + ...Attribute.QueueMessageId(metadata.messageId), + ...getQueueOverhead({ requestedAt }), + }); - const stepFn = getStepFunction(stepName); - if (!stepFn) { - throw new Error(`Step "${stepName}" not found`); - } - if (typeof stepFn !== 'function') { - throw new Error( - `Step "${stepName}" is not a function (got ${typeof stepFn})` - ); - } + const stepFn = getStepFunction(stepName); + if (!stepFn) { + throw new Error(`Step "${stepName}" not found`); + } + if (typeof stepFn !== 'function') { + throw new Error( + `Step "${stepName}" is not a function (got ${typeof stepFn})` + ); + } - span?.setAttributes({ - ...Attribute.WorkflowName(workflowName), - ...Attribute.WorkflowRunId(workflowRunId), - ...Attribute.StepId(stepId), - ...Attribute.StepMaxRetries(stepFn.maxRetries ?? 
3), - ...Attribute.StepTracePropagated(!!traceContext), - }); - - let step = await world.steps.get(workflowRunId, stepId); - - runtimeLogger.debug('Step execution details', { - stepName, - stepId: step.stepId, - status: step.status, - attempt: step.attempt, - }); - - span?.setAttributes({ - ...Attribute.StepStatus(step.status), - }); - - // Check if the step has a `retryAfter` timestamp that hasn't been reached yet - const now = Date.now(); - if (step.retryAfter && step.retryAfter.getTime() > now) { - const timeoutSeconds = Math.ceil( - (step.retryAfter.getTime() - now) / 1000 - ); span?.setAttributes({ - ...Attribute.StepRetryTimeoutSeconds(timeoutSeconds), + ...Attribute.WorkflowName(workflowName), + ...Attribute.WorkflowRunId(workflowRunId), + ...Attribute.StepId(stepId), + ...Attribute.StepMaxRetries(stepFn.maxRetries ?? 3), + ...Attribute.StepTracePropagated(!!traceContext), }); - runtimeLogger.debug('Step retryAfter timestamp not yet reached', { + + let step = await world.steps.get(workflowRunId, stepId); + + runtimeLogger.debug('Step execution details', { stepName, stepId: step.stepId, - retryAfter: step.retryAfter, - timeoutSeconds, + status: step.status, + attempt: step.attempt, + }); + + span?.setAttributes({ + ...Attribute.StepStatus(step.status), }); - return { timeoutSeconds }; - } - let result: unknown; - const attempt = step.attempt + 1; - try { - if (!['pending', 'running'].includes(step.status)) { - // We should only be running the step if it's either - // a) pending - initial state, or state set on re-try - // b) running - if a step fails mid-execution, like a function timeout - // otherwise, the step has been invoked erroneously - console.error( - `[Workflows] "${workflowRunId}" - Step invoked erroneously, expected status "pending" or "running", got "${step.status}" instead, skipping execution` + // Check if the step has a `retryAfter` timestamp that hasn't been reached yet + const now = Date.now(); + if (step.retryAfter && 
step.retryAfter.getTime() > now) { + const timeoutSeconds = Math.ceil( + (step.retryAfter.getTime() - now) / 1000 ); span?.setAttributes({ - ...Attribute.StepSkipped(true), - ...Attribute.StepSkipReason(step.status), + ...Attribute.StepRetryTimeoutSeconds(timeoutSeconds), + }); + runtimeLogger.debug('Step retryAfter timestamp not yet reached', { + stepName, + stepId: step.stepId, + retryAfter: step.retryAfter, + timeoutSeconds, }); - return; + return { timeoutSeconds }; } - await world.events.create(workflowRunId, { - eventType: 'step_started', // TODO: Replace with 'step_retrying' - correlationId: stepId, - }); + let result: unknown; + const attempt = step.attempt + 1; + try { + if (!['pending', 'running'].includes(step.status)) { + // We should only be running the step if it's either + // a) pending - initial state, or state set on re-try + // b) running - if a step fails mid-execution, like a function timeout + // otherwise, the step has been invoked erroneously + console.error( + `[Workflows] "${workflowRunId}" - Step invoked erroneously, expected status "pending" or "running", got "${step.status}" instead, skipping execution` + ); + span?.setAttributes({ + ...Attribute.StepSkipped(true), + ...Attribute.StepSkipReason(step.status), + }); + return; + } - step = await world.steps.update(workflowRunId, stepId, { - attempt, - status: 'running', - }); + await world.events.create(workflowRunId, { + eventType: 'step_started', // TODO: Replace with 'step_retrying' + correlationId: stepId, + }); + + step = await world.steps.update(workflowRunId, stepId, { + attempt, + status: 'running', + }); - if (!step.startedAt) { - throw new WorkflowRuntimeError( - `Step "${stepId}" has no "startedAt" timestamp` + if (!step.startedAt) { + throw new WorkflowRuntimeError( + `Step "${stepId}" has no "startedAt" timestamp` + ); + } + // Hydrate the step input arguments and closure variables + const ops: Promise[] = []; + const hydratedInput = hydrateStepArguments( + step.input, + ops, + 
workflowRunId ); - } - // Hydrate the step input arguments and closure variables - const ops: Promise[] = []; - const hydratedInput = hydrateStepArguments( - step.input, - ops, - workflowRunId - ); - const args = hydratedInput.args; + const args = hydratedInput.args; - span?.setAttributes({ - ...Attribute.StepArgumentsCount(args.length), - }); + span?.setAttributes({ + ...Attribute.StepArgumentsCount(args.length), + }); - result = await contextStorage.run( - { - stepMetadata: { - stepId, - stepStartedAt: new Date(+step.startedAt), - attempt, - }, - workflowMetadata: { - workflowRunId, - workflowStartedAt: new Date(+workflowStartedAt), - // TODO: there should be a getUrl method on the world interface itself. This - // solution only works for vercel + local worlds. - url: process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}` - : `http://localhost:${port ?? 3000}`, + result = await contextStorage.run( + { + stepMetadata: { + stepId, + stepStartedAt: new Date(+step.startedAt), + attempt, + }, + workflowMetadata: { + workflowRunId, + workflowStartedAt: new Date(+workflowStartedAt), + // TODO: there should be a getUrl method on the world interface itself. This + // solution only works for vercel + local worlds. + url: process.env.VERCEL_URL + ? `https://${process.env.VERCEL_URL}` + : `http://localhost:${port ?? 
3000}`, + }, + ops, + closureVars: hydratedInput.closureVars, }, - ops, - closureVars: hydratedInput.closureVars, - }, - () => stepFn.apply(null, args) - ); - - // NOTE: None of the code from this point is guaranteed to run - // Since the step might fail or cause a function timeout and the process might be SIGKILL'd - // The workflow runtime must be resilient to the below code not executing on a failed step - result = dehydrateStepReturnValue(result, ops, workflowRunId); - - waitUntil( - Promise.all(ops).catch((err) => { - // Ignore expected client disconnect errors (e.g., browser refresh during streaming) - const isAbortError = - err?.name === 'AbortError' || err?.name === 'ResponseAborted'; - if (!isAbortError) throw err; - }) - ); - - // Mark the step as completed first. This order is important. If a concurrent - // execution marked the step as complete, this request should throw, and - // this prevent the step_completed event in the event log - // TODO: this should really be atomic and handled by the world - await world.steps.update(workflowRunId, stepId, { - status: 'completed', - output: result as Serializable, - }); - - // Then, append the event log with the step result - await world.events.create(workflowRunId, { - eventType: 'step_completed', - correlationId: stepId, - eventData: { - result: result as Serializable, - }, - }); + () => stepFn.apply(null, args) + ); - span?.setAttributes({ - ...Attribute.StepStatus('completed'), - ...Attribute.StepResultType(typeof result), - }); - } catch (err: unknown) { - span?.setAttributes({ - ...Attribute.StepErrorName(getErrorName(err)), - ...Attribute.StepErrorMessage(String(err)), - }); + // NOTE: None of the code from this point is guaranteed to run + // Since the step might fail or cause a function timeout and the process might be SIGKILL'd + // The workflow runtime must be resilient to the below code not executing on a failed step + result = dehydrateStepReturnValue(result, ops, workflowRunId); + + waitUntil( + 
Promise.all(ops).catch((err) => { + // Ignore expected client disconnect errors (e.g., browser refresh during streaming) + const isAbortError = + err?.name === 'AbortError' || + err?.name === 'ResponseAborted'; + if (!isAbortError) throw err; + }) + ); - if (WorkflowAPIError.is(err)) { - if (err.status === 410) { - // Workflow has already completed, so no-op - console.warn( - `Workflow run "${workflowRunId}" has already completed, skipping step "${stepId}": ${err.message}` - ); - return; - } - } + // Mark the step as completed first. This order is important. If a concurrent + // execution marked the step as complete, this request should throw, and + // this prevent the step_completed event in the event log + // TODO: this should really be atomic and handled by the world + await world.steps.update(workflowRunId, stepId, { + status: 'completed', + output: result as Serializable, + }); - if (FatalError.is(err)) { - const errorStack = getErrorStack(err); - const stackLines = errorStack.split('\n').slice(0, 4); - console.error( - `[Workflows] "${workflowRunId}" - Encountered \`FatalError\` while executing step "${stepName}":\n > ${stackLines.join('\n > ')}\n\nBubbling up error to parent workflow` - ); - // Fatal error - store the error in the event log and re-invoke the workflow + // Then, append the event log with the step result await world.events.create(workflowRunId, { - eventType: 'step_failed', + eventType: 'step_completed', correlationId: stepId, eventData: { - error: String(err), - stack: errorStack, - fatal: true, - }, - }); - await world.steps.update(workflowRunId, stepId, { - status: 'failed', - error: { - message: err.message || String(err), - stack: errorStack, - // TODO: include error codes when we define them + result: result as Serializable, }, }); span?.setAttributes({ - ...Attribute.StepStatus('failed'), - ...Attribute.StepFatalError(true), + ...Attribute.StepStatus('completed'), + ...Attribute.StepResultType(typeof result), }); - } else { - const 
maxRetries = stepFn.maxRetries ?? 3; - + } catch (err: unknown) { span?.setAttributes({ - ...Attribute.StepAttempt(attempt), - ...Attribute.StepMaxRetries(maxRetries), + ...Attribute.StepErrorName(getErrorName(err)), + ...Attribute.StepErrorMessage(String(err)), }); - if (attempt >= maxRetries) { - // Max retries reached + if (WorkflowAPIError.is(err)) { + if (err.status === 410) { + // Workflow has already completed, so no-op + console.warn( + `Workflow run "${workflowRunId}" has already completed, skipping step "${stepId}": ${err.message}` + ); + return; + } + } + + if (FatalError.is(err)) { const errorStack = getErrorStack(err); const stackLines = errorStack.split('\n').slice(0, 4); console.error( - `[Workflows] "${workflowRunId}" - Encountered \`Error\` while executing step "${stepName}" (attempt ${attempt}):\n > ${stackLines.join('\n > ')}\n\n Max retries reached\n Bubbling error to parent workflow` + `[Workflows] "${workflowRunId}" - Encountered \`FatalError\` while executing step "${stepName}":\n > ${stackLines.join('\n > ')}\n\nBubbling up error to parent workflow` ); - const errorMessage = `Step "${stepName}" failed after max retries: ${String(err)}`; + // Fatal error - store the error in the event log and re-invoke the workflow await world.events.create(workflowRunId, { eventType: 'step_failed', correlationId: stepId, eventData: { - error: errorMessage, + error: String(err), stack: errorStack, fatal: true, }, @@ -826,71 +814,150 @@ export const stepEntrypoint = await world.steps.update(workflowRunId, stepId, { status: 'failed', error: { - message: errorMessage, + message: err.message || String(err), stack: errorStack, + // TODO: include error codes when we define them }, }); span?.setAttributes({ ...Attribute.StepStatus('failed'), - ...Attribute.StepRetryExhausted(true), + ...Attribute.StepFatalError(true), }); } else { - // Not at max retries yet - log as a retryable error - if (RetryableError.is(err)) { - console.warn( - `[Workflows] "${workflowRunId}" 
- Encountered \`RetryableError\` while executing step "${stepName}" (attempt ${attempt}):\n > ${String(err.message)}\n\n This step has failed but will be retried` - ); - } else { - const stackLines = getErrorStack(err).split('\n').slice(0, 4); + const maxRetries = stepFn.maxRetries ?? 3; + + span?.setAttributes({ + ...Attribute.StepAttempt(attempt), + ...Attribute.StepMaxRetries(maxRetries), + }); + + if (attempt >= maxRetries) { + // Max retries reached + const errorStack = getErrorStack(err); + const stackLines = errorStack.split('\n').slice(0, 4); console.error( - `[Workflows] "${workflowRunId}" - Encountered \`Error\` while executing step "${stepName}" (attempt ${attempt}):\n > ${stackLines.join('\n > ')}\n\n This step has failed but will be retried` + `[Workflows] "${workflowRunId}" - Encountered \`Error\` while executing step "${stepName}" (attempt ${attempt}):\n > ${stackLines.join('\n > ')}\n\n Max retries reached\n Bubbling error to parent workflow` ); - } - await world.events.create(workflowRunId, { - eventType: 'step_failed', - correlationId: stepId, - eventData: { - error: String(err), - stack: getErrorStack(err), - }, - }); + const errorMessage = `Step "${stepName}" failed after max retries: ${String(err)}`; + await world.events.create(workflowRunId, { + eventType: 'step_failed', + correlationId: stepId, + eventData: { + error: errorMessage, + stack: errorStack, + fatal: true, + }, + }); + await world.steps.update(workflowRunId, stepId, { + status: 'failed', + error: { + message: errorMessage, + stack: errorStack, + }, + }); - await world.steps.update(workflowRunId, stepId, { - status: 'pending', // TODO: Should be "retrying" once we have that status - ...(RetryableError.is(err) && { - retryAfter: err.retryAfter, - }), - }); + span?.setAttributes({ + ...Attribute.StepStatus('failed'), + ...Attribute.StepRetryExhausted(true), + }); + } else { + // Not at max retries yet - log as a retryable error + if (RetryableError.is(err)) { + console.warn( + 
`[Workflows] "${workflowRunId}" - Encountered \`RetryableError\` while executing step "${stepName}" (attempt ${attempt}):\n > ${String(err.message)}\n\n This step has failed but will be retried` + ); + } else { + const stackLines = getErrorStack(err) + .split('\n') + .slice(0, 4); + console.error( + `[Workflows] "${workflowRunId}" - Encountered \`Error\` while executing step "${stepName}" (attempt ${attempt}):\n > ${stackLines.join('\n > ')}\n\n This step has failed but will be retried` + ); + } + await world.events.create(workflowRunId, { + eventType: 'step_failed', + correlationId: stepId, + eventData: { + error: String(err), + stack: getErrorStack(err), + }, + }); - const timeoutSeconds = Math.max( - 1, - RetryableError.is(err) - ? Math.ceil((+err.retryAfter.getTime() - Date.now()) / 1000) - : 1 - ); + await world.steps.update(workflowRunId, stepId, { + status: 'pending', // TODO: Should be "retrying" once we have that status + ...(RetryableError.is(err) && { + retryAfter: err.retryAfter, + }), + }); - span?.setAttributes({ - ...Attribute.StepRetryTimeoutSeconds(timeoutSeconds), - ...Attribute.StepRetryWillRetry(true), - }); + const timeoutSeconds = Math.max( + 1, + RetryableError.is(err) + ? Math.ceil( + (+err.retryAfter.getTime() - Date.now()) / 1000 + ) + : 1 + ); - // It's a retryable error - so have the queue keep the message visible - // so that it gets retried. - return { timeoutSeconds }; + span?.setAttributes({ + ...Attribute.StepRetryTimeoutSeconds(timeoutSeconds), + ...Attribute.StepRetryWillRetry(true), + }); + + // It's a retryable error - so have the queue keep the message visible + // so that it gets retried. 
+ return { timeoutSeconds }; + } } } - } - await world.queue(`__wkf_workflow_${workflowName}`, { - runId: workflowRunId, - traceCarrier: await serializeTraceCarrier(), - } satisfies WorkflowInvokePayload); - }); + await queueMessage(world, `__wkf_workflow_${workflowName}`, { + runId: workflowRunId, + traceCarrier: await serializeTraceCarrier(), + requestedAt: new Date(), + }); + } + ); }); } ); +/** + * Queues a message to the specified queue with tracing. + */ +async function queueMessage( + world: World, + ...args: Parameters +) { + const queueName = args[0]; + await trace( + 'queueMessage', + { + attributes: Attribute.QueueName(queueName), + kind: await getSpanKind('PRODUCER'), + }, + async (span) => { + const { messageId } = await world.queue(...args); + span?.setAttributes(Attribute.QueueMessageId(messageId)); + } + ); +} + +/** + * Calculates the queue overhead time in milliseconds for a given message. + */ +function getQueueOverhead(message: { requestedAt?: Date }) { + if (!message.requestedAt) return; + try { + return Attribute.QueueOverheadMs( + Date.now() - message.requestedAt.getTime() + ); + } catch { + return; + } +} + // this is a no-op placeholder as the client is // expecting this to be present but we aren't actually using it export function runStep() {} diff --git a/packages/core/src/runtime/resume-hook.ts b/packages/core/src/runtime/resume-hook.ts index 9ae5146b8..cf0db83cf 100644 --- a/packages/core/src/runtime/resume-hook.ts +++ b/packages/core/src/runtime/resume-hook.ts @@ -1,7 +1,6 @@ import { waitUntil } from '@vercel/functions'; import { ERROR_SLUGS, WorkflowRuntimeError } from '@workflow/errors'; -import type { Hook } from '@workflow/world'; -import type { WorkflowInvokePayload } from '../schemas.js'; +import type { Hook, WorkflowInvokePayload } from '@workflow/world'; import { dehydrateStepReturnValue, hydrateStepArguments, diff --git a/packages/core/src/runtime/start.test.ts b/packages/core/src/runtime/start.test.ts new file mode 100644 
index 000000000..370642794 --- /dev/null +++ b/packages/core/src/runtime/start.test.ts @@ -0,0 +1,70 @@ +import { WorkflowRuntimeError } from '@workflow/errors'; +import { describe, expect, it, vi } from 'vitest'; +import { start } from './start.js'; + +// Mock @vercel/functions +vi.mock('@vercel/functions', () => ({ + waitUntil: vi.fn(), +})); + +// Mock the world module with all required exports +vi.mock('./world.js', () => ({ + getWorld: vi.fn(), + getWorldHandlers: vi.fn(() => ({ + createQueueHandler: vi.fn(() => vi.fn()), + })), +})); + +describe('start', () => { + describe('error handling', () => { + it('should throw WorkflowRuntimeError when workflow is undefined', async () => { + await expect( + // @ts-expect-error - intentionally passing undefined + start(undefined, []) + ).rejects.toThrow(WorkflowRuntimeError); + + await expect( + // @ts-expect-error - intentionally passing undefined + start(undefined, []) + ).rejects.toThrow( + `'start' received an invalid workflow function. Ensure the Workflow Development Kit is configured correctly and the function includes a 'use workflow' directive.` + ); + }); + + it('should throw WorkflowRuntimeError when workflow is null', async () => { + await expect( + // @ts-expect-error - intentionally passing null + start(null, []) + ).rejects.toThrow(WorkflowRuntimeError); + + await expect( + // @ts-expect-error - intentionally passing null + start(null, []) + ).rejects.toThrow( + `'start' received an invalid workflow function. Ensure the Workflow Development Kit is configured correctly and the function includes a 'use workflow' directive.` + ); + }); + + it('should throw WorkflowRuntimeError when workflow has no workflowId', async () => { + const invalidWorkflow = () => Promise.resolve('result'); + + await expect(start(invalidWorkflow, [])).rejects.toThrow( + WorkflowRuntimeError + ); + + await expect(start(invalidWorkflow, [])).rejects.toThrow( + `'start' received an invalid workflow function. 
Ensure the Workflow Development Kit is configured correctly and the function includes a 'use workflow' directive.` + ); + }); + + it('should throw WorkflowRuntimeError when workflow has empty string workflowId', async () => { + const invalidWorkflow = Object.assign(() => Promise.resolve('result'), { + workflowId: '', + }); + + await expect(start(invalidWorkflow, [])).rejects.toThrow( + WorkflowRuntimeError + ); + }); + }); +}); diff --git a/packages/core/src/runtime/start.ts b/packages/core/src/runtime/start.ts index aae60fa73..687bf012b 100644 --- a/packages/core/src/runtime/start.ts +++ b/packages/core/src/runtime/start.ts @@ -1,8 +1,9 @@ import { waitUntil } from '@vercel/functions'; import { WorkflowRuntimeError } from '@workflow/errors'; import { withResolvers } from '@workflow/utils'; +import type { WorkflowInvokePayload } from '@workflow/world'; import { Run } from '../runtime.js'; -import type { Serializable, WorkflowInvokePayload } from '../schemas.js'; +import type { Serializable } from '../schemas.js'; import { dehydrateWorkflowArguments } from '../serialization.js'; import * as Attribute from '../telemetry/semantic-conventions.js'; import { serializeTraceCarrier, trace } from '../telemetry.js'; @@ -58,7 +59,7 @@ export async function start( ) { return await waitedUntil(() => { // @ts-expect-error this field is added by our client transform - const workflowName = workflow.workflowId; + const workflowName = workflow?.workflowId; if (!workflowName) { throw new WorkflowRuntimeError( diff --git a/packages/core/src/runtime/world.ts b/packages/core/src/runtime/world.ts index 732a1aa52..8ea9395d7 100644 --- a/packages/core/src/runtime/world.ts +++ b/packages/core/src/runtime/world.ts @@ -1,7 +1,7 @@ import { createRequire } from 'node:module'; import { join } from 'node:path'; import type { World } from '@workflow/world'; -import { createEmbeddedWorld } from '@workflow/world-local'; +import { createLocalWorld } from '@workflow/world-local'; import { 
createVercelWorld } from '@workflow/world-vercel'; const require = createRequire(join(process.cwd(), 'index.js')); @@ -14,12 +14,12 @@ const globalSymbols: typeof globalThis & { [StubbedWorldCache]?: World; } = globalThis; -function defaultWorld(): 'vercel' | 'embedded' { +function defaultWorld(): 'vercel' | 'local' { if (process.env.VERCEL_DEPLOYMENT_ID) { return 'vercel'; } - return 'embedded'; + return 'local'; } /** @@ -42,9 +42,9 @@ export const createWorld = (): World => { }); } - if (targetWorld === 'embedded') { - return createEmbeddedWorld({ - dataDir: process.env.WORKFLOW_EMBEDDED_DATA_DIR, + if (targetWorld === 'local') { + return createLocalWorld({ + dataDir: process.env.WORKFLOW_LOCAL_DATA_DIR, }); } diff --git a/packages/core/src/schemas.ts b/packages/core/src/schemas.ts index 69f317033..c5a484452 100644 --- a/packages/core/src/schemas.ts +++ b/packages/core/src/schemas.ts @@ -1,24 +1,3 @@ -import { z } from 'zod'; - -// OpenTelemetry trace context for distributed tracing -const TraceCarrierSchema = z.record(z.string(), z.string()); - -export const WorkflowInvokePayloadSchema = z.object({ - runId: z.string(), - traceCarrier: TraceCarrierSchema.optional(), -}); - -export const StepInvokePayloadSchema = z.object({ - workflowName: z.string(), - workflowRunId: z.string(), - workflowStartedAt: z.number(), - stepId: z.string(), - traceCarrier: TraceCarrierSchema.optional(), -}); - -export type WorkflowInvokePayload = z.infer; -export type StepInvokePayload = z.infer; - /** * A serializable value: * Any valid JSON object is serializable diff --git a/packages/core/src/telemetry.ts b/packages/core/src/telemetry.ts index ea1218ae6..d1c88e2a5 100644 --- a/packages/core/src/telemetry.ts +++ b/packages/core/src/telemetry.ts @@ -1,4 +1,5 @@ -import type { Span, SpanOptions } from '@opentelemetry/api'; +import type * as api from '@opentelemetry/api'; +import type { Span, SpanKind, SpanOptions } from '@opentelemetry/api'; import { once } from '@workflow/utils'; // 
============================================================ @@ -118,9 +119,7 @@ export async function getSpanContextForTraceCarrier( } export async function getActiveSpan() { - const otel = await OtelApi.value; - if (!otel) return null; - return otel.trace.getActiveSpan(); + return await withOtel((otel) => otel.trace.getActiveSpan()); } export function instrumentObject(prefix: string, o: T): T { @@ -137,3 +136,23 @@ export function instrumentObject(prefix: string, o: T): T { } return handlers; } + +export async function getSpanKind(field: keyof typeof SpanKind) { + return withOtel((x) => x.SpanKind[field]); +} + +export async function withOtel( + fn: (otel: typeof api) => T +): Promise | undefined> { + const otel = await OtelApi.value; + if (!otel) return undefined; + return await fn(otel); +} + +export function linkToCurrentContext(): Promise<[api.Link] | undefined> { + return withOtel((otel): [api.Link] | undefined => { + const context = otel.trace.getActiveSpan()?.spanContext(); + if (!context) return; + return [{ context }]; + }); +} diff --git a/packages/core/src/telemetry/semantic-conventions.ts b/packages/core/src/telemetry/semantic-conventions.ts index b76329dee..1787825b2 100644 --- a/packages/core/src/telemetry/semantic-conventions.ts +++ b/packages/core/src/telemetry/semantic-conventions.ts @@ -37,15 +37,16 @@ * @packageDocumentation */ -import type { Step, WorkflowRun } from '@workflow/world'; +import type { MessageId, Step, WorkflowRun } from '@workflow/world'; /** * Creates a semantic convention function that returns an attribute object. 
* @param name - The attribute name following OpenTelemetry semantic conventions * @returns A function that takes a value and returns an attribute object */ -function SemanticConvention(name: string) { - return (value: T) => ({ [name]: value }); +function SemanticConvention(...names: string[]) { + return (value: T) => + Object.fromEntries(names.map((name) => [name, value] as const)); } // Workflow attributes @@ -173,6 +174,15 @@ export const StepRetryWillRetry = SemanticConvention( /** Name of the queue being used for message processing */ export const QueueName = SemanticConvention('queue.name'); +/** The message id being handled */ +export const QueueMessageId = SemanticConvention( + 'messaging.message.id', + 'queue.message.id' +); + +/** Time taken to enqueue the message in milliseconds */ +export const QueueOverheadMs = SemanticConvention('queue.overhead_ms'); + // Deployment attributes /** Unique identifier for the deployment environment */ diff --git a/packages/core/src/workflow.ts b/packages/core/src/workflow.ts index f27f4453f..542d97a9c 100644 --- a/packages/core/src/workflow.ts +++ b/packages/core/src/workflow.ts @@ -7,6 +7,7 @@ import * as nanoid from 'nanoid'; import { monotonicFactory } from 'ulid'; import { EventConsumerResult, EventsConsumer } from './events-consumer.js'; import { ENOTSUP } from './global.js'; +import { parseWorkflowName } from './parse-name.js'; import type { WorkflowOrchestratorContext } from './private.js'; import { dehydrateWorkflowReturnValue, @@ -28,7 +29,6 @@ import type { WorkflowMetadata } from './workflow/get-workflow-metadata.js'; import { WORKFLOW_CONTEXT_SYMBOL } from './workflow/get-workflow-metadata.js'; import { createCreateHook } from './workflow/hook.js'; import { createSleep } from './workflow/sleep.js'; -import { parseWorkflowName } from './parse-name.js'; export async function runWorkflow( workflowCode: string, @@ -104,7 +104,7 @@ export async function runWorkflow( getWorkflowRunStreamId(workflowRun.runId, 
namespace); // TODO: there should be a getUrl method on the world interface itself. This - // solution only works for vercel + embedded worlds. + // solution only works for vercel + local worlds. const url = process.env.VERCEL_URL ? `https://${process.env.VERCEL_URL}` : `http://localhost:${port ?? 3000}`; diff --git a/packages/errors/CHANGELOG.md b/packages/errors/CHANGELOG.md index 4345dce8c..8b4304856 100644 --- a/packages/errors/CHANGELOG.md +++ b/packages/errors/CHANGELOG.md @@ -1,5 +1,14 @@ # @workflow/errors +## 4.0.1-beta.7 + +### Patch Changes + +- Updated dependencies [bc9b628] +- Updated dependencies [34f3f86] +- Updated dependencies [cd451e0] + - @workflow/utils@4.0.1-beta.5 + ## 4.0.1-beta.6 ### Patch Changes diff --git a/packages/errors/package.json b/packages/errors/package.json index 9df1d18c0..352f13b38 100644 --- a/packages/errors/package.json +++ b/packages/errors/package.json @@ -1,7 +1,7 @@ { "name": "@workflow/errors", "description": "A package for standardizing errors in Workflow DevKit", - "version": "4.0.1-beta.6", + "version": "4.0.1-beta.7", "type": "module", "main": "dist/index.js", "files": [ diff --git a/packages/next/CHANGELOG.md b/packages/next/CHANGELOG.md index f31c99d6f..b8b88109b 100644 --- a/packages/next/CHANGELOG.md +++ b/packages/next/CHANGELOG.md @@ -1,5 +1,32 @@ # @workflow/next +## 4.0.1-beta.24 + +### Patch Changes + +- Updated dependencies [2f0840b] +- Updated dependencies [555d7a6] + - @workflow/core@4.0.1-beta.21 + - @workflow/swc-plugin@4.0.1-beta.10 + - @workflow/builders@4.0.1-beta.20 + +## 4.0.1-beta.23 + +### Patch Changes + +- 8d4562e: Rename leftover references to "embedded world" to be "local world" +- Updated dependencies [d53bf90] +- Updated dependencies [0f1645b] +- Updated dependencies [3c19e90] +- Updated dependencies [1ac5592] +- Updated dependencies [5b91861] +- Updated dependencies [bdde1bd] +- Updated dependencies [0cacb99] +- Updated dependencies [8d4562e] + - @workflow/builders@4.0.1-beta.19 + - 
@workflow/core@4.0.1-beta.20 + - @workflow/swc-plugin@4.0.1-beta.9 + ## 4.0.1-beta.22 ### Patch Changes diff --git a/packages/next/package.json b/packages/next/package.json index 30690885b..f7600b4c6 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/next", - "version": "4.0.1-beta.22", + "version": "4.0.1-beta.24", "description": "Next.js integration for Workflow DevKit", "type": "commonjs", "main": "dist/index.js", diff --git a/packages/next/src/index.ts b/packages/next/src/index.ts index ab29ddaa6..886b37973 100644 --- a/packages/next/src/index.ts +++ b/packages/next/src/index.ts @@ -13,7 +13,7 @@ export function withWorkflow( workflows, }: { workflows?: { - embedded?: { + local?: { port?: number; dataDir?: string; }; @@ -22,10 +22,10 @@ export function withWorkflow( ) { if (!process.env.VERCEL_DEPLOYMENT_ID) { if (!process.env.WORKFLOW_TARGET_WORLD) { - process.env.WORKFLOW_TARGET_WORLD = 'embedded'; - process.env.WORKFLOW_EMBEDDED_DATA_DIR = '.next/workflow-data'; + process.env.WORKFLOW_TARGET_WORLD = 'local'; + process.env.WORKFLOW_LOCAL_DATA_DIR = '.next/workflow-data'; } - const maybePort = workflows?.embedded?.port; + const maybePort = workflows?.local?.port; if (maybePort) { process.env.PORT = maybePort.toString(); } diff --git a/packages/nitro/CHANGELOG.md b/packages/nitro/CHANGELOG.md index 873eeb340..959ec059a 100644 --- a/packages/nitro/CHANGELOG.md +++ b/packages/nitro/CHANGELOG.md @@ -1,5 +1,34 @@ # @vercel/workflow-nitro +## 4.0.1-beta.24 + +### Patch Changes + +- Updated dependencies [2f0840b] +- Updated dependencies [555d7a6] + - @workflow/core@4.0.1-beta.21 + - @workflow/swc-plugin@4.0.1-beta.10 + - @workflow/builders@4.0.1-beta.20 + - @workflow/rollup@4.0.0-beta.4 + +## 4.0.1-beta.23 + +### Patch Changes + +- 3c19e90: Fix Nitro and SvelteKit build race conditions and make writing debug file atomic +- Updated dependencies [d53bf90] +- Updated dependencies [0f1645b] +- Updated 
dependencies [3c19e90] +- Updated dependencies [1ac5592] +- Updated dependencies [5b91861] +- Updated dependencies [bdde1bd] +- Updated dependencies [0cacb99] +- Updated dependencies [8d4562e] + - @workflow/builders@4.0.1-beta.19 + - @workflow/core@4.0.1-beta.20 + - @workflow/swc-plugin@4.0.1-beta.9 + - @workflow/rollup@4.0.0-beta.3 + ## 4.0.1-beta.22 ### Patch Changes diff --git a/packages/nitro/package.json b/packages/nitro/package.json index 74cbf85b1..c10bb667b 100644 --- a/packages/nitro/package.json +++ b/packages/nitro/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/nitro", - "version": "4.0.1-beta.22", + "version": "4.0.1-beta.24", "description": "Nitro integration for Workflow DevKit", "type": "module", "main": "dist/index.js", diff --git a/packages/nuxt/CHANGELOG.md b/packages/nuxt/CHANGELOG.md index 5c38be9d2..515eec6e9 100644 --- a/packages/nuxt/CHANGELOG.md +++ b/packages/nuxt/CHANGELOG.md @@ -1,5 +1,18 @@ # @workflow/nuxt +## 4.0.1-beta.13 + +### Patch Changes + +- @workflow/nitro@4.0.1-beta.24 + +## 4.0.1-beta.12 + +### Patch Changes + +- Updated dependencies [3c19e90] + - @workflow/nitro@4.0.1-beta.23 + ## 4.0.1-beta.11 ### Patch Changes diff --git a/packages/nuxt/package.json b/packages/nuxt/package.json index db1223184..12b503720 100644 --- a/packages/nuxt/package.json +++ b/packages/nuxt/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/nuxt", - "version": "4.0.1-beta.11", + "version": "4.0.1-beta.13", "description": "Nuxt integration for Workflow DevKit", "license": "Apache-2.0", "type": "module", diff --git a/packages/rollup/CHANGELOG.md b/packages/rollup/CHANGELOG.md index 69fc0342d..d4da60577 100644 --- a/packages/rollup/CHANGELOG.md +++ b/packages/rollup/CHANGELOG.md @@ -1,5 +1,20 @@ # @workflow/rollup +## 4.0.0-beta.4 + +### Patch Changes + +- Updated dependencies [555d7a6] + - @workflow/swc-plugin@4.0.1-beta.10 + +## 4.0.0-beta.3 + +### Patch Changes + +- Updated dependencies [5b91861] +- Updated dependencies [0cacb99] + - 
@workflow/swc-plugin@4.0.1-beta.9 + ## 4.0.0-beta.2 ### Patch Changes diff --git a/packages/rollup/package.json b/packages/rollup/package.json index 8a7c40869..aa8a498ea 100644 --- a/packages/rollup/package.json +++ b/packages/rollup/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/rollup", - "version": "4.0.0-beta.2", + "version": "4.0.0-beta.4", "description": "Rollup plugin for Workflow DevKit", "type": "module", "main": "dist/index.js", diff --git a/packages/sveltekit/CHANGELOG.md b/packages/sveltekit/CHANGELOG.md index 2a6ee0a4f..cf74ef3f6 100644 --- a/packages/sveltekit/CHANGELOG.md +++ b/packages/sveltekit/CHANGELOG.md @@ -1,5 +1,28 @@ # @workflow/sveltekit +## 4.0.0-beta.18 + +### Patch Changes + +- Updated dependencies [555d7a6] + - @workflow/swc-plugin@4.0.1-beta.10 + - @workflow/builders@4.0.1-beta.20 + - @workflow/rollup@4.0.0-beta.4 + +## 4.0.0-beta.17 + +### Patch Changes + +- 3c19e90: Fix Nitro and SvelteKit build race conditions and make writing debug file atomic +- Updated dependencies [d53bf90] +- Updated dependencies [3c19e90] +- Updated dependencies [1ac5592] +- Updated dependencies [5b91861] +- Updated dependencies [0cacb99] + - @workflow/builders@4.0.1-beta.19 + - @workflow/swc-plugin@4.0.1-beta.9 + - @workflow/rollup@4.0.0-beta.3 + ## 4.0.0-beta.16 ### Patch Changes diff --git a/packages/sveltekit/package.json b/packages/sveltekit/package.json index 956b79859..5e0bad9c9 100644 --- a/packages/sveltekit/package.json +++ b/packages/sveltekit/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/sveltekit", - "version": "4.0.0-beta.16", + "version": "4.0.0-beta.18", "description": "SvelteKit integration for Workflow DevKit", "type": "module", "main": "dist/index.js", diff --git a/packages/swc-plugin-workflow/CHANGELOG.md b/packages/swc-plugin-workflow/CHANGELOG.md index 5cd4557ed..ceb4dfebf 100644 --- a/packages/swc-plugin-workflow/CHANGELOG.md +++ b/packages/swc-plugin-workflow/CHANGELOG.md @@ -1,5 +1,18 @@ # @workflow/swc-plugin +## 4.0.1-beta.10 
+ +### Patch Changes + +- 555d7a6: Normalize anonymous default export workflow IDs to "default" + +## 4.0.1-beta.9 + +### Patch Changes + +- 5b91861: Apply workflow function transformation in "step" mode +- 0cacb99: Support nested "use step" declarations in non-workflow functions + ## 4.0.1-beta.8 ### Patch Changes diff --git a/packages/swc-plugin-workflow/package.json b/packages/swc-plugin-workflow/package.json index 26d37bca1..3588969f5 100644 --- a/packages/swc-plugin-workflow/package.json +++ b/packages/swc-plugin-workflow/package.json @@ -1,6 +1,6 @@ { "name": "@workflow/swc-plugin", - "version": "4.0.1-beta.8", + "version": "4.0.1-beta.10", "description": "SWC plugin for transforming Workflow DevKit code", "license": "Apache-2.0", "type": "module", diff --git a/packages/swc-plugin-workflow/transform/src/lib.rs b/packages/swc-plugin-workflow/transform/src/lib.rs index ce578e3ac..63c66de03 100644 --- a/packages/swc-plugin-workflow/transform/src/lib.rs +++ b/packages/swc-plugin-workflow/transform/src/lib.rs @@ -2229,7 +2229,20 @@ impl StepTransform { // Create a statement that adds workflowId property to a function (client mode) fn create_workflow_id_assignment(&self, fn_name: &str, span: swc_core::common::Span) -> Stmt { - let workflow_id = self.create_id(Some(fn_name), span, true); + // For workflow ID generation, normalize auto-generated __default variants to "default" + // Only do this if the name was auto-generated for an anonymous default export, + // not if the user explicitly named their function "__default" + let id_name = if (fn_name == "__default" || fn_name.starts_with("__default$")) + && self + .workflow_export_to_const_name + .get("default") + .map_or(false, |const_name| const_name == fn_name) + { + "default" + } else { + fn_name + }; + let workflow_id = self.create_id(Some(id_name), span, true); // Create: functionName.workflowId = "workflowId" Stmt::Expr(ExprStmt { @@ -2671,14 +2684,24 @@ impl StepTransform { let workflow_entries: Vec = 
sorted_workflow_names .into_iter() .map(|fn_name| { - // Check if this export name has a different const name (e.g., "default" -> "__default") let fn_name_str: &str = fn_name; + // Look up the actual const/function name for this export let actual_name = self .workflow_export_to_const_name .get(fn_name_str) .map(|s| s.as_str()) .unwrap_or(fn_name_str); - let workflow_id = self.create_id(Some(actual_name), DUMMY_SP, true); + // For auto-generated __default names (anonymous default exports), + // normalize to "default" for the workflow ID + let id_name = + if (actual_name == "__default" || actual_name.starts_with("__default$")) + && fn_name_str == "default" + { + "default" + } else { + actual_name + }; + let workflow_id = self.create_id(Some(id_name), DUMMY_SP, true); format!("\"{}\":{{\"workflowId\":\"{}\"}}", fn_name_str, workflow_id) }) .collect(); diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-client.js b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-client.js index 56e4f7406..b99c06bfe 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-client.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-client.js @@ -1,7 +1,7 @@ // Test anonymous default export workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async function() { throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-step.js b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-step.js index 56e4f7406..b99c06bfe 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-step.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-step.js @@ -1,7 +1,7 @@ // Test anonymous default export workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async function() { throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-workflow.js b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-workflow.js index aef7b24c0..de198aac5 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-workflow.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/output-workflow.js @@ -1,8 +1,8 @@ // Test anonymous default export workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async function() { const result = await someStep(); return result; }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-client.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-client.js index 8e26bc6cb..53f511a13 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-client.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-client.js @@ -1,7 +1,7 @@ // Test default export arrow workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async (data)=>{ throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-step.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-step.js index 8e26bc6cb..53f511a13 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-step.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-step.js @@ -1,7 +1,7 @@ // Test default export arrow workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async (data)=>{ throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-workflow.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-workflow.js index f79827ce0..fa9fd56da 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-workflow.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/output-workflow.js @@ -1,8 +1,8 @@ // Test default export arrow workflow -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async (data)=>{ const processed = await processData(data); return processed; }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-client.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-client.js index bcd09ef34..db28be2e6 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-client.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-client.js @@ -1,10 +1,10 @@ // Existing variable named __default -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default$1"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = "existing variable"; // Use it to avoid unused variable console.log(__default); const 
__default$1 = async function() { throw new Error("You attempted to execute workflow __default$1 function directly. To start a workflow, use start(__default$1) from workflow/api"); }; -__default$1.workflowId = "workflow//input.js//__default$1"; +__default$1.workflowId = "workflow//input.js//default"; export default __default$1; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-step.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-step.js index bcd09ef34..db28be2e6 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-step.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-step.js @@ -1,10 +1,10 @@ // Existing variable named __default -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default$1"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = "existing variable"; // Use it to avoid unused variable console.log(__default); const __default$1 = async function() { throw new Error("You attempted to execute workflow __default$1 function directly. 
To start a workflow, use start(__default$1) from workflow/api"); }; -__default$1.workflowId = "workflow//input.js//__default$1"; +__default$1.workflowId = "workflow//input.js//default"; export default __default$1; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-workflow.js b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-workflow.js index a384d1b4e..97455800d 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-workflow.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/output-workflow.js @@ -1,5 +1,5 @@ // Existing variable named __default -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default$1"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = "existing variable"; // Use it to avoid unused variable console.log(__default); @@ -7,5 +7,5 @@ const __default$1 = async function() { const result = await someStep(); return result; }; -__default$1.workflowId = "workflow//input.js//__default$1"; +__default$1.workflowId = "workflow//input.js//default"; export default __default$1; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-client.js b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-client.js index 224a09903..b3669041e 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-client.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-client.js @@ -1,6 +1,6 @@ -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = 
async function() { throw new Error("You attempted to execute workflow __default function directly. To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-step.js b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-step.js index 224a09903..b3669041e 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-step.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-step.js @@ -1,6 +1,6 @@ -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async function() { throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-workflow.js b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-workflow.js index 06a92baef..ef4329502 100644 --- a/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-workflow.js +++ b/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/output-workflow.js @@ -1,7 +1,7 @@ -/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//__default"}}}}*/; +/**__internal_workflows{"workflows":{"input.js":{"default":{"workflowId":"workflow//input.js//default"}}}}*/; const __default = async function() { const result = await someStep(); return result; }; -__default.workflowId = "workflow//input.js//__default"; +__default.workflowId = "workflow//input.js//default"; export default __default; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js new file mode 100644 index 000000000..778e1df9b --- /dev/null +++ b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js @@ -0,0 +1,8 @@ +// User explicitly names their workflow function __default +// The workflow ID should use "__default", not normalize to "default" +export async function __default() { + 'use workflow'; + const result = await someStep(); + return result; +} + diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-client.js b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-client.js new file mode 100644 index 000000000..701e58400 --- /dev/null +++ 
b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-client.js @@ -0,0 +1,7 @@ +// User explicitly names their workflow function __default +// The workflow ID should use "__default", not normalize to "default" +/**__internal_workflows{"workflows":{"input.js":{"__default":{"workflowId":"workflow//input.js//__default"}}}}*/; +export async function __default() { + throw new Error("You attempted to execute workflow __default function directly. To start a workflow, use start(__default) from workflow/api"); +} +__default.workflowId = "workflow//input.js//__default"; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-step.js b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-step.js new file mode 100644 index 000000000..701e58400 --- /dev/null +++ b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-step.js @@ -0,0 +1,7 @@ +// User explicitly names their workflow function __default +// The workflow ID should use "__default", not normalize to "default" +/**__internal_workflows{"workflows":{"input.js":{"__default":{"workflowId":"workflow//input.js//__default"}}}}*/; +export async function __default() { + throw new Error("You attempted to execute workflow __default function directly. 
To start a workflow, use start(__default) from workflow/api"); +} +__default.workflowId = "workflow//input.js//__default"; diff --git a/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-workflow.js b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-workflow.js new file mode 100644 index 000000000..e6dfc6bf8 --- /dev/null +++ b/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/output-workflow.js @@ -0,0 +1,8 @@ +// User explicitly names their workflow function __default +// The workflow ID should use "__default", not normalize to "default" +/**__internal_workflows{"workflows":{"input.js":{"__default":{"workflowId":"workflow//input.js//__default"}}}}*/; +export async function __default() { + const result = await someStep(); + return result; +} +__default.workflowId = "workflow//input.js//__default"; diff --git a/packages/utils/CHANGELOG.md b/packages/utils/CHANGELOG.md index b64434d3f..4bc1bf945 100644 --- a/packages/utils/CHANGELOG.md +++ b/packages/utils/CHANGELOG.md @@ -1,5 +1,13 @@ # @workflow/utils +## 4.0.1-beta.5 + +### Patch Changes + +- bc9b628: Prevent @vercel/nft from tracing /proc paths during build +- 34f3f86: fix(utils): detect linux ports via /proc +- cd451e0: Replace execa dependency with built-in node execFile + ## 4.0.1-beta.4 ### Patch Changes diff --git a/packages/utils/package.json b/packages/utils/package.json index e9c0b4b38..f7f784a34 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -1,7 +1,7 @@ { "name": "@workflow/utils", "description": "Utility functions for Workflow DevKit", - "version": "4.0.1-beta.4", + "version": "4.0.1-beta.5", "type": "module", "main": "dist/index.js", "files": [ diff --git a/packages/web-shared/CHANGELOG.md b/packages/web-shared/CHANGELOG.md index b025eebc6..174e8dc22 100644 --- a/packages/web-shared/CHANGELOG.md +++ b/packages/web-shared/CHANGELOG.md @@ -1,5 +1,23 @@ # 
@workflow/web-shared +## 4.0.1-beta.22 + +### Patch Changes + +- Updated dependencies [2f0840b] + - @workflow/core@4.0.1-beta.21 + +## 4.0.1-beta.21 + +### Patch Changes + +- Updated dependencies [0f1645b] +- Updated dependencies [10c5b91] +- Updated dependencies [bdde1bd] +- Updated dependencies [8d4562e] + - @workflow/core@4.0.1-beta.20 + - @workflow/world@4.0.1-beta.8 + ## 4.0.1-beta.20 ### Patch Changes diff --git a/packages/web-shared/package.json b/packages/web-shared/package.json index a4bd00d9b..5db38bf6d 100644 --- a/packages/web-shared/package.json +++ b/packages/web-shared/package.json @@ -1,7 +1,7 @@ { "name": "@workflow/web-shared", "description": "Shared components for Workflow Observability UI", - "version": "4.0.1-beta.20", + "version": "4.0.1-beta.22", "private": false, "files": [ "dist", diff --git a/packages/web/CHANGELOG.md b/packages/web/CHANGELOG.md index 96221dbd7..56a1bfd90 100644 --- a/packages/web/CHANGELOG.md +++ b/packages/web/CHANGELOG.md @@ -1,5 +1,13 @@ # @workflow/web +## 4.0.1-beta.12 + +### Patch Changes + +- 109fe59: Add PostgreSQL backend support in web UI settings +- 10c5b91: Update Next.js version to 16 +- 8d4562e: Rename leftover references to "embedded world" to be "local world" + ## 4.0.1-beta.11 ### Patch Changes diff --git a/packages/web/package.json b/packages/web/package.json index a29aa7300..fbdde48aa 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -1,7 +1,7 @@ { "name": "@workflow/web", "description": "Workflow Observability UI", - "version": "4.0.1-beta.11", + "version": "4.0.1-beta.12", "private": false, "files": [ ".next", diff --git a/packages/web/src/components/display-utils/connection-status.tsx b/packages/web/src/components/display-utils/connection-status.tsx index 9fac111bf..868b3100a 100644 --- a/packages/web/src/components/display-utils/connection-status.tsx +++ b/packages/web/src/components/display-utils/connection-status.tsx @@ -25,22 +25,22 @@ const getConnectionInfo = ( return { 
provider: 'Vercel', parts }; } - if (backend === 'embedded') { - // Embedded backend + if (backend === 'local') { + // Local backend const parts: string[] = []; if (config.dataDir) { parts.push(`dir: ${config.dataDir}`); } if (config.port) parts.push(`port: ${config.port}`); - return { provider: 'Embedded', parts }; + return { provider: 'Local', parts }; } return { provider: config.backend || 'unknown', parts: [] }; }; export function ConnectionStatus({ config }: ConnectionStatusProps) { - const backend = config.backend || 'embedded'; + const backend = config.backend || 'local'; const { provider, parts } = getConnectionInfo(backend, config); return ( diff --git a/packages/web/src/components/settings-sidebar.tsx b/packages/web/src/components/settings-sidebar.tsx index 3b3682e53..bbc212b3d 100644 --- a/packages/web/src/components/settings-sidebar.tsx +++ b/packages/web/src/components/settings-sidebar.tsx @@ -34,8 +34,8 @@ export function SettingsSidebar() { const { data: worldsAvailability = [], isLoading: isLoadingWorlds } = useWorldsAvailability(); - const backend = localConfig.backend || 'embedded'; - const isEmbedded = backend === 'embedded'; + const backend = localConfig.backend || 'local'; + const isLocal = backend === 'local'; const isPostgres = backend === 'postgres'; // Update local config when query params change @@ -117,7 +117,7 @@ export function SettingsSidebar() {
+ + + + diff --git a/workbench/astro/src/workflows b/workbench/astro/src/workflows new file mode 120000 index 000000000..8a9aab9c9 --- /dev/null +++ b/workbench/astro/src/workflows @@ -0,0 +1 @@ +../../sveltekit/src/workflows \ No newline at end of file diff --git a/workbench/astro/tsconfig.json b/workbench/astro/tsconfig.json new file mode 100644 index 000000000..8bf91d3bb --- /dev/null +++ b/workbench/astro/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "astro/tsconfigs/strict", + "include": [".astro/types.d.ts", "**/*"], + "exclude": ["dist"] +} diff --git a/workbench/example/workflows/99_e2e.ts b/workbench/example/workflows/99_e2e.ts index df5e0b22c..1cb227c54 100644 --- a/workbench/example/workflows/99_e2e.ts +++ b/workbench/example/workflows/99_e2e.ts @@ -10,6 +10,7 @@ import { RetryableError, sleep, } from 'workflow'; +import { getRun, start } from 'workflow/api'; import { callThrower } from './helpers.js'; ////////////////////////////////////////////////////////// @@ -559,3 +560,52 @@ export async function closureVariableWorkflow(baseValue: number) { const output = await calculate(); return output; } + +////////////////////////////////////////////////////////// + +// Child workflow that will be spawned from another workflow +export async function childWorkflow(value: number) { + 'use workflow'; + // Do some processing + const doubled = await doubleValue(value); + return { childResult: doubled, originalValue: value }; +} + +async function doubleValue(value: number) { + 'use step'; + return value * 2; +} + +// Step function that spawns another workflow using start() +async function spawnChildWorkflow(value: number) { + 'use step'; + // start() can only be called inside a step function, not directly in workflow code + const childRun = await start(childWorkflow, [value]); + return childRun.runId; +} + +// Step function that waits for a workflow run to complete and returns its result +async function awaitWorkflowResult(runId: string) { + 'use step'; + const run = 
getRun(runId); + const result = await run.returnValue; + return result; +} + +export async function spawnWorkflowFromStepWorkflow(inputValue: number) { + 'use workflow'; + // Spawn the child workflow from inside a step function + const childRunId = await spawnChildWorkflow(inputValue); + + // Wait for the child workflow to complete (also in a step) + const childResult = await awaitWorkflowResult<{ + childResult: number; + originalValue: number; + }>(childRunId); + + return { + parentInput: inputValue, + childRunId, + childResult, + }; +} diff --git a/workbench/express/nitro.config.ts b/workbench/express/nitro.config.ts index 8e89eea15..175d1ace4 100644 --- a/workbench/express/nitro.config.ts +++ b/workbench/express/nitro.config.ts @@ -4,7 +4,7 @@ export default defineNitroConfig({ modules: ['workflow/nitro'], vercel: { entryFormat: 'node' }, routes: { - '/**': './src/index.ts', + '/**': { handler: './src/index.ts', format: 'node' }, }, plugins: ['plugins/start-pg-world.ts'], }); diff --git a/workbench/express/package.json b/workbench/express/package.json index c4435e698..ba9ce1bf1 100644 --- a/workbench/express/package.json +++ b/workbench/express/package.json @@ -15,12 +15,11 @@ }, "dependencies": { "express": "^5.1.0", - "nitro": "catalog:", - "srvx": "^0.9.2" + "nitro": "catalog:" }, "devDependencies": { - "@workflow/world-postgres": "workspace:*", "@types/express": "^5.0.5", + "@workflow/world-postgres": "workspace:*", "ai": "catalog:", "lodash.chunk": "^4.2.0", "openai": "^6.1.0", diff --git a/workbench/express/src/index.ts b/workbench/express/src/index.ts index b459fbbf3..76ed45078 100644 --- a/workbench/express/src/index.ts +++ b/workbench/express/src/index.ts @@ -1,5 +1,4 @@ import express from 'express'; -import { toFetchHandler } from 'srvx/node'; import { getHookByToken, getRun, resumeHook, start } from 'workflow/api'; import { hydrateWorkflowArguments } from 'workflow/internal/serialization'; import { allWorkflows } from '../_workflows.js'; @@ -221,4 
+220,4 @@ app.post('/api/test-direct-step-call', async (req, res) => { return res.json({ result }); }); -export default toFetchHandler(app as any); +export default app;