diff --git a/.changeset/config.json b/.changeset/config.json index 7427339cb..399f4e904 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -7,5 +7,5 @@ "access": "public", "baseBranch": "main", "updateInternalDependencies": "patch", - "ignore": ["playground"] + "ignore": [] } diff --git a/.github/actions/deployment-comment/action.yml b/.github/actions/deployment-comment/action.yml index 498277058..6a430cbaa 100644 --- a/.github/actions/deployment-comment/action.yml +++ b/.github/actions/deployment-comment/action.yml @@ -3,7 +3,7 @@ description: 'Post deployment status comments on PRs for both preview and produc inputs: project-name: - description: 'Name of the project being deployed (e.g., playground, website)' + description: 'Name of the project being deployed (e.g., website)' required: true preview-url: description: 'The preview deployment URL' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 93bb42cd7..e9dd13643 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,8 +50,8 @@ jobs: - name: Quality gate (lint + typecheck + test) run: pnpm nx affected -t lint typecheck test --parallel --configuration=production --base="$NX_BASE" --head="$NX_HEAD" - - name: Build all affected projects (except playground) - run: pnpm nx affected -t build --configuration=production --parallel --exclude=playground --base="$NX_BASE" --head="$NX_HEAD" + - name: Build all affected projects + run: pnpm nx affected -t build --configuration=production --parallel --base="$NX_BASE" --head="$NX_HEAD" # ─────────────────────────────────────── 2. EDGE-WORKER E2E ────────────────────────────────────── @@ -101,49 +101,7 @@ jobs: run: pnpm nx affected -t test:e2e --parallel --base="$NX_BASE" --head="$NX_HEAD" -# ────────────────────────────────── 3. 
DEPLOY PLAYGROUND ─────────────────────────── - deploy-playground: - needs: [build-and-test, edge-worker-e2e] - if: false # Disabled - # if: >- - # ${{ - # (github.event_name == 'pull_request') || - # (github.ref == 'refs/heads/main' && github.event_name == 'push') - # }} - runs-on: ubuntu-latest - environment: ${{ github.event_name == 'pull_request' && 'preview' || 'production' }} - env: - NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_PLAYGROUND_SITE_ID }} - NEXT_PUBLIC_SUPABASE_URL: ${{ github.event_name == 'pull_request' && secrets.DEMO_PREVIEW_SUPABASE_URL || secrets.DEMO_PRODUCTION_SUPABASE_URL }} - NEXT_PUBLIC_SUPABASE_ANON_KEY: ${{ github.event_name == 'pull_request' && secrets.DEMO_PREVIEW_SUPABASE_ANON_KEY || secrets.DEMO_PRODUCTION_SUPABASE_ANON_KEY }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - uses: ./.github/actions/setup - - # Build the workspace libraries that the app imports - - run: pnpm nx run-many -t build --projects client,dsl --configuration=production - - - name: Build & deploy to Netlify - id: deploy - run: | - pnpm netlify deploy --build --filter=playground \ - --context ${{ github.event_name == 'pull_request' && 'deploy-preview' || 'production' }} \ - ${{ github.event_name == 'pull_request' && format('--alias=pr-{0}', github.event.pull_request.number) || '--prod' }} - - - name: Post deployment comment - if: always() - uses: ./.github/actions/deployment-comment - with: - project-name: Playground - preview-url: https://pr-${{ github.event.pull_request.number }}--pgflow-demo.netlify.app - production-url: https://playground.pgflow.dev - -# ────────────────────────────────── 4. DEPLOY WEBSITE ─────────────────────────── +# ────────────────────────────────── 3. DEPLOY WEBSITE ─────────────────────────── deploy-website: needs: [build-and-test, edge-worker-e2e] runs-on: ubuntu-latest diff --git a/.nxignore b/.nxignore index 941ecbbef..e8a05a632 100644 --- a/.nxignore +++ b/.nxignore @@ -1,3 +1,2 @@ # Ignore Supabase Edge Functions from Nx dependency analysis -# These are Deno runtime functions with their own import resolution -examples/playground/supabase/functions/ \ No newline at end of file +# These are Deno runtime functions with their own import resolution \ No newline at end of file diff --git a/RELEASES.md b/RELEASES.md index 29df6d40c..6c7a5c96b 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -48,7 +48,7 @@ When the Version Packages PR is merged: ```bash # 1. Build all packages -pnpm nx run-many -t build --exclude=playground +pnpm nx run-many -t build # 2. Publish to npm (all packages except edge-worker) pnpm publish --recursive --filter=!./pkgs/edge-worker diff --git a/examples/playground/.env.example b/examples/playground/.env.example deleted file mode 100644 index df47c920d..000000000 --- a/examples/playground/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -# Update these with your Supabase details from your project settings > API -# https://app.supabase.com/project/_/settings/api -NEXT_PUBLIC_SUPABASE_URL="http://127.0.0.1:54321" -NEXT_PUBLIC_SUPABASE_ANON_KEY="your key here" -GITHUB_OAUTH_CLIEND_ID= -GITHUB_OAUTH_CLIENT_SECRET= diff --git a/examples/playground/.gitignore b/examples/playground/.gitignore deleted file mode 100644 index 59d63f38b..000000000 --- a/examples/playground/.gitignore +++ /dev/null @@ -1,46 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
- -# dependencies -/node_modules -/.pnp -.pnp.* -.yarn/* -!.yarn/patches -!.yarn/plugins -!.yarn/releases -!.yarn/versions - -# testing -/coverage - -# next.js -/.next/ -/out/ -/dist/ - -# production -/build - -# misc -.DS_Store -*.pem - -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# local env files -.env*.local -.env - -# vercel -.vercel - -# typescript -*.tsbuildinfo -next-env.d.ts -package-lock.json - -# Local development vendor directory -supabase/functions/_vendor/ diff --git a/examples/playground/.npmrc b/examples/playground/.npmrc deleted file mode 100644 index 39074bc13..000000000 --- a/examples/playground/.npmrc +++ /dev/null @@ -1,4 +0,0 @@ -strict-peer-dependencies=false -auto-install-peers=true -@jsr:registry=https://npm.jsr.io -link-workspace-packages=false \ No newline at end of file diff --git a/examples/playground/.nvmrc b/examples/playground/.nvmrc deleted file mode 100644 index f812e4592..000000000 --- a/examples/playground/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -20.18.1 \ No newline at end of file diff --git a/examples/playground/CLAUDE.md b/examples/playground/CLAUDE.md deleted file mode 100644 index 1b4147a06..000000000 --- a/examples/playground/CLAUDE.md +++ /dev/null @@ -1,58 +0,0 @@ -# CLAUDE.md - -> [!WARNING] -> QUOTE ALL THE FILE PATHS THAT CONTAIN ROUTE PARAMETERS LIKE '[run_id]' -> BECAUSE BRACKETS HAVE SPECIAL MEANING IN BASH! -> Do this: `cat 'some/path/to/[id]/page.tsx'` instead of `cat some/path/to/[id]/page.tsx` - -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. - -## Build/Run Commands - -- `npm run dev` - Start development server -- `npm run build` - Build the application -- `npm run start` - Start production server -- `npm run start-functions` - Start Supabase Edge Functions -- `npm run start-worker` - Trigger the analyze_website function -- `npm run gen-types` - Generate TypeScript types from Supabase schema - -## Utility Scripts - -### run_sql.sh -A utility script for running SQL queries against the local Supabase database. It automatically gets the database URL from `supabase status`. 
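
The script body itself is not part of this diff; below is a rough, hypothetical sketch of what such a wrapper might look like. It assumes `supabase status -o env` prints a `DB_URL=...` line and that `psql` is on `PATH`; the real `scripts/run_sql.sh` may differ.

```bash
#!/usr/bin/env bash
# Hypothetical sketch; the actual scripts/run_sql.sh is not shown in
# this diff and may differ in details.
set -eu

# Pull the local database URL out of `supabase status`. Assumes the
# `-o env` output format includes a DB_URL=... line.
DB_URL="$(supabase status -o env 2>/dev/null | sed -n 's/^DB_URL=//p' | tr -d '"' || true)"

if [ -z "$DB_URL" ]; then
  echo "Supabase does not appear to be running ('supabase status' gave no DB_URL)." >&2
  exit 1
fi

if [ "$#" -eq 0 ]; then
  # No arguments: read SQL from stdin, e.g. `echo "SELECT NOW();" | run_sql.sh`
  psql "$DB_URL"
elif [ "${1#-}" != "$1" ]; then
  # First argument is a flag: pass all psql options straight through (-c, -f, ...)
  psql "$DB_URL" "$@"
else
  # Bare string: treat it as a single query
  psql "$DB_URL" -c "$1"
fi
```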
- -Location: `./scripts/run_sql.sh` - -Usage: -```bash -# Run a simple query -./scripts/run_sql.sh "SELECT * FROM pgflow.flows;" - -# Run with explicit -c flag -./scripts/run_sql.sh -c "SELECT COUNT(*) FROM pgflow.step_tasks;" - -# Run a SQL file -./scripts/run_sql.sh -f some_script.sql - -# Pipe SQL to the script -echo "SELECT NOW();" | ./scripts/run_sql.sh -``` - -Benefits: -- No need to remember or type the database connection string -- Automatically detects if Supabase is running -- Supports all standard psql options -- Simplifies database queries during development - -## Code Style Guidelines - -- **TypeScript**: Use strict mode with proper type annotations -- **Imports**: Use `@/` alias prefix for project imports (e.g., `@/components/ui/button`) -- **Components**: Follow React Server Component patterns with proper props typing -- **Formatting**: Project uses Prettier with default settings -- **UI**: Uses shadcn/ui component library with Tailwind CSS -- **Next.js**: Follow App Router conventions with page.tsx for routes -- **Error Handling**: Use proper error boundaries and form validation -- **Database**: Use Supabase for backend with Edge Functions -- **Naming**: Use PascalCase for components, camelCase for variables/functions -- **File Structure**: Group related components in dedicated folders diff --git a/examples/playground/DEPLOYMENT.md b/examples/playground/DEPLOYMENT.md deleted file mode 100644 index e7e05b28c..000000000 --- a/examples/playground/DEPLOYMENT.md +++ /dev/null @@ -1,179 +0,0 @@ -# Playground Deployment Guide - -This document explains the deployment setup for the pgflow playground example, including important caveats and requirements for CI/CD. - -## Overview - -The playground is a Next.js application deployed to Netlify. It demonstrates pgflow features using Supabase Edge Functions and requires careful coordination between build and deployment steps. - -## Key Configuration Files - -### 1. `project.json` - Nx Configuration - -The playground uses specific output paths that may seem unusual but are required for Next.js + Netlify: - -```json -{ - "build": { - "executor": "@nx/next:build", - "outputs": ["{projectRoot}/.next"], - "options": { - "outputPath": "{projectRoot}" // ⚠️ This is correct - do not change! - } - } -} -``` - -**Why `outputPath: {projectRoot}`?** -- Next.js builds into `.next` folder within the project root -- Netlify needs access to the entire project structure (including `package.json`, `next.config.js`) -- The Netlify Next.js plugin expects standard Next.js directory layout -- Changing this breaks the deployment - -### 2. `netlify.toml` - Netlify Configuration - -```toml -[build] - base = "examples/playground" - command = "pnpm build" - publish = ".next" - -[[plugins]] - package = "@netlify/plugin-nextjs" -``` - -**Important Notes:** -- `base` must point to the playground directory -- `publish` is `.next` (not `dist` or other paths) -- The Next.js plugin handles serverless function generation - -## CI/CD Requirements - -### 1. Build Dependencies First - -The playground imports from workspace packages (`@pgflow/client`, `@pgflow/dsl`). These MUST be built before the playground: - -```yaml -# Build dependencies first -- run: pnpm nx run-many -t build --projects client,dsl --configuration=production - -# Then deploy playground -- run: pnpm netlify deploy --build --filter=playground -``` - -### 2. 
Environment Variables - -Different environments require different Supabase configurations: - -**Preview Deployments (PRs):** -```yaml -NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.DEMO_PREVIEW_SUPABASE_URL }} -NEXT_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.DEMO_PREVIEW_SUPABASE_ANON_KEY }} -``` - -**Production Deployments:** -```yaml -NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.DEMO_PRODUCTION_SUPABASE_URL }} -NEXT_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.DEMO_PRODUCTION_SUPABASE_ANON_KEY }} -``` - -### 3. Netlify CLI Authentication - -Required secrets: -- `NETLIFY_AUTH_TOKEN` - Personal access token from Netlify -- `NETLIFY_SITE_ID` - The site's unique identifier - -### 4. Deployment Commands - -**Preview Deployment:** -```bash -pnpm netlify deploy --build --filter=playground \ - --context deploy-preview \ - --alias=pr-${{ github.event.pull_request.number }} -``` - -**Production Deployment:** -```bash -pnpm netlify deploy --build --filter=playground \ - --context production \ - --prod -``` - -## Common Issues and Solutions - -### Issue: "Cannot find module @pgflow/client" - -**Cause:** Dependencies not built before playground deployment - -**Solution:** Always build `client` and `dsl` packages first: -```bash -pnpm nx run-many -t build --projects client,dsl -``` - -### Issue: Netlify can't find Next.js app - -**Cause:** Incorrect `outputPath` in project.json - -**Solution:** Keep `outputPath: "{projectRoot}"` - do not change to `dist/` or other paths - -### Issue: Preview URLs not working - -**Cause:** Incorrect alias format - -**Solution:** Use predictable aliases like `pr-123`: -- Preview URL format: `https://pr-123--pgflow-demo.netlify.app` -- Site name is `pgflow-demo`, not the site ID - -### Issue: Environment variables not available in build - -**Cause:** Variables set in CI but not passed to Netlify build - -**Solution:** Either: -1. Set them in Netlify UI (recommended for secrets) -2. Pass them explicitly in the deploy command -3. Use Netlify's environment variable UI for different contexts - -## Local Testing - -To test the deployment process locally: - -```bash -# 1. Build dependencies -pnpm nx run-many -t build --projects client,dsl - -# 2. Build playground -pnpm nx build playground - -# 3. Test Netlify build (requires Netlify CLI) -cd examples/playground -netlify build - -# 4. Preview locally -netlify dev -``` - -## Deployment Flow - -1. **CI triggers** on PR or push to main -2. **Build job** runs tests and builds all affected projects -3. **Deploy job** checks out code, rebuilds dependencies, and deploys -4. **PR comment** posts deployment status with preview URL - -## Important Caveats - -1. **Do NOT cache node_modules between build and deploy jobs** - The deploy job needs to rebuild to ensure proper module resolution - -2. **Always use `--filter=playground`** with Netlify CLI to ensure it runs from the correct directory - -3. **The `--build` flag is intentional** - It ensures Netlify runs its build process with proper context - -4. **Nx Cloud caching helps** but doesn't replace the need to rebuild in deploy job - -5. 
**Environment variables must be set before build** - Next.js bakes them in at build time - -## Future Improvements - -- Consider using Nx artifacts to pass built packages between jobs -- Implement deployment status checks -- Add rollback capabilities -- Extract deployment URL from Netlify CLI output for accurate PR comments \ No newline at end of file diff --git a/examples/playground/LICENSE b/examples/playground/LICENSE deleted file mode 100644 index 1255b6b41..000000000 --- a/examples/playground/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2025 Wojciech Majewski - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/examples/playground/README.md b/examples/playground/README.md deleted file mode 100644 index 85903648a..000000000 --- a/examples/playground/README.md +++ /dev/null @@ -1,159 +0,0 @@ - - Next.js and Supabase Starter Kit - the fastest way to build apps with Next.js and Supabase -

-[README header: logo image linking to the demo, title "Next.js and Supabase Starter Kit", tagline "The fastest way to build apps with Next.js and Supabase", and nav links: Features · Demo · Deploy to Vercel · Clone and run locally · Feedback and issues · More Examples]
-
- -## Features - -- Works across the entire [Next.js](https://nextjs.org) stack - - App Router - - Pages Router - - Middleware - - Client - - Server - - It just works! -- supabase-ssr. A package to configure Supabase Auth to use cookies -- Styling with [Tailwind CSS](https://tailwindcss.com) -- Components with [shadcn/ui](https://ui.shadcn.com/) -- Optional deployment with [Supabase Vercel Integration and Vercel deploy](#deploy-your-own) - - Environment variables automatically assigned to Vercel project - -## Demo - -You can view a fully working demo at [demo-nextjs-with-supabase.vercel.app](https://demo-nextjs-with-supabase.vercel.app/). - -## Deploy to Vercel - -Vercel deployment will guide you through creating a Supabase account and project. - -After installation of the Supabase integration, all relevant environment variables will be assigned to the project so the deployment is fully functioning. - -[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fvercel%2Fnext.js%2Ftree%2Fcanary%2Fexamples%2Fwith-supabase&project-name=nextjs-with-supabase&repository-name=nextjs-with-supabase&demo-title=nextjs-with-supabase&demo-description=This+starter+configures+Supabase+Auth+to+use+cookies%2C+making+the+user%27s+session+available+throughout+the+entire+Next.js+app+-+Client+Components%2C+Server+Components%2C+Route+Handlers%2C+Server+Actions+and+Middleware.&demo-url=https%3A%2F%2Fdemo-nextjs-with-supabase.vercel.app%2F&external-id=https%3A%2F%2Fgithub.com%2Fvercel%2Fnext.js%2Ftree%2Fcanary%2Fexamples%2Fwith-supabase&demo-image=https%3A%2F%2Fdemo-nextjs-with-supabase.vercel.app%2Fopengraph-image.png) - -The above will also clone the Starter kit to your GitHub, you can clone that locally and develop locally. - -If you wish to just develop locally and not deploy to Vercel, [follow the steps below](#clone-and-run-locally). - -## Clone and run locally - -1. You'll first need a Supabase project which can be made [via the Supabase dashboard](https://database.new) - -2. Create a Next.js app using the Supabase Starter template npx command - - ```bash - npx create-next-app --example with-supabase with-supabase-app - ``` - - ```bash - yarn create next-app --example with-supabase with-supabase-app - ``` - - ```bash - pnpm create next-app --example with-supabase with-supabase-app - ``` - -3. Use `cd` to change into the app's directory - - ```bash - cd with-supabase-app - ``` - -4. Rename `.env.example` to `.env.local` and update the following: - - ``` - NEXT_PUBLIC_SUPABASE_URL=[INSERT SUPABASE PROJECT URL] - NEXT_PUBLIC_SUPABASE_ANON_KEY=[INSERT SUPABASE PROJECT API ANON KEY] - ``` - - Both `NEXT_PUBLIC_SUPABASE_URL` and `NEXT_PUBLIC_SUPABASE_ANON_KEY` can be found in [your Supabase project's API settings](https://app.supabase.com/project/_/settings/api) - -5. Set up GitHub OAuth provider: - - - Go to [GitHub Developer Settings](https://github.com/settings/developers) - - Click "New OAuth App" - - Fill in the form: - - Application name: (Your app name) - - Homepage URL: `http://localhost:3000` - - Authorization callback URL: `http://localhost:3000/auth/callback` - - Click "Register application" - - Copy the Client ID - - Generate a new Client Secret and copy it - - In your Supabase dashboard, go to Authentication > Providers - - Find GitHub and enable it - - Enter your GitHub Client ID and Client Secret - - Save changes - -6. 
You can now run the Next.js local development server: - - ```bash - npm run dev - ``` - - The starter kit should now be running on [localhost:3000](http://localhost:3000/). - -7. This template comes with the default shadcn/ui style initialized. If you instead want other ui.shadcn styles, delete `components.json` and [re-install shadcn/ui](https://ui.shadcn.com/docs/installation/next) - -> Check out [the docs for Local Development](https://supabase.com/docs/guides/getting-started/local-development) to also run Supabase locally. - -## Utility Scripts - -### run_sql.sh - -A convenient script for running SQL queries against the local Supabase database without needing to remember connection strings. - -Location: `./scripts/run_sql.sh` - -#### Usage Examples - -```bash -# Run a simple query -./scripts/run_sql.sh "SELECT * FROM pgflow.flows;" - -# Check cron job status -./scripts/run_sql.sh "SELECT * FROM cron.job WHERE jobname = 'pgflow-analyze-website-worker';" - -# View HTTP responses from pg_net -./scripts/run_sql.sh "SELECT id, status_code, created FROM net._http_response ORDER BY created DESC LIMIT 10;" - -# Check pgflow task processing -./scripts/run_sql.sh "SELECT * FROM pgflow.step_tasks WHERE status = 'completed' ORDER BY completed_at DESC LIMIT 10;" - -# Run with explicit -c flag -./scripts/run_sql.sh -c "SELECT COUNT(*) FROM pgflow.step_tasks;" - -# Run a SQL file -./scripts/run_sql.sh -f some_script.sql - -# Pipe SQL to the script -echo "SELECT NOW();" | ./scripts/run_sql.sh -``` - -The script automatically: -- Gets the database URL from `supabase status` -- Checks if Supabase is running -- Passes through all psql options -- Handles both direct SQL queries and file execution - -## Feedback and issues - -Please file feedback and issues over on the [Supabase GitHub org](https://github.com/supabase/supabase/issues/new/choose). - -## More Supabase examples - -- [Next.js Subscription Payments Starter](https://github.com/vercel/nextjs-subscription-payments) -- [Cookie-based Auth and the Next.js 13 App Router (free course)](https://youtube.com/playlist?list=PL5S4mPUpp4OtMhpnp93EFSo42iQ40XjbF) -- [Supabase Auth and the Next.js App Router](https://github.com/supabase/supabase/tree/master/examples/auth/nextjs) diff --git a/examples/playground/app/(auth-pages)/forgot-password/page.tsx b/examples/playground/app/(auth-pages)/forgot-password/page.tsx deleted file mode 100644 index 33da02906..000000000 --- a/examples/playground/app/(auth-pages)/forgot-password/page.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { forgotPasswordAction } from "@/app/actions"; -import { FormMessage, Message } from "@/components/form-message"; -import { SubmitButton } from "@/components/submit-button"; -import { Input } from "@/components/ui/input"; -import { Label } from "@/components/ui/label"; -import Link from "next/link"; -import { SmtpMessage } from "../smtp-message"; - -export const dynamic = 'force-static'; - -export default async function ForgotPassword(props: { - searchParams: Promise; -}) { - const searchParams = await props.searchParams; - return ( - <> -
-
-

Reset Password

-

- Already have an account?{" "} - - Sign in - -

-
-
- - - - Reset Password - - -
-
- - - ); -} diff --git a/examples/playground/app/(auth-pages)/layout.tsx b/examples/playground/app/(auth-pages)/layout.tsx deleted file mode 100644 index e038de15a..000000000 --- a/examples/playground/app/(auth-pages)/layout.tsx +++ /dev/null @@ -1,9 +0,0 @@ -export default async function Layout({ - children, -}: { - children: React.ReactNode; -}) { - return ( -
{children}
- ); -} diff --git a/examples/playground/app/(auth-pages)/sign-in/page.tsx b/examples/playground/app/(auth-pages)/sign-in/page.tsx deleted file mode 100644 index 1b96369c7..000000000 --- a/examples/playground/app/(auth-pages)/sign-in/page.tsx +++ /dev/null @@ -1,109 +0,0 @@ -'use client'; -export const runtime = 'edge'; - -import { signInAction } from '@/app/actions'; -import { FormMessage, Message } from '@/components/form-message'; -import { GithubButton } from '@/components/github-button'; -import { SubmitButton } from '@/components/submit-button'; -import { Input } from '@/components/ui/input'; -import { Label } from '@/components/ui/label'; -import Link from 'next/link'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; -import { createClient } from '@/utils/supabase/client'; - -export default function Login({ - searchParams, -}: { - searchParams: Promise; -}) { - const [message, setMessage] = useState(null); - const [isLoading, setIsLoading] = useState(false); - const router = useRouter(); - - // After successful login, redirect to home page - useEffect(() => { - // Process the searchParams Promise when component mounts - const processSearchParams = async () => { - try { - const resolvedParams = await searchParams; - setMessage(resolvedParams); - } catch (error) { - console.error('Error processing searchParams:', error); - } - }; - - processSearchParams(); - - const checkAuth = async () => { - const supabase = createClient(); - const { - data: { user }, - } = await supabase.auth.getUser(); - - // If user is logged in and we're on the sign-in page, redirect to home - if ( - user && - typeof window !== 'undefined' && - window.location.pathname === '/sign-in' - ) { - router.push('/'); - } - }; - - checkAuth(); - }, [router, searchParams]); - - return ( -
{ - e.preventDefault(); - setIsLoading(true); - }}> -

Sign in

-

- Don't have an account?{' '} - - Sign up - -

-
- -
-
- Or continue with email -
-
- - -
- - - Forgot Password? - -
- - - Sign in with email - - -
-
- ); -} diff --git a/examples/playground/app/(auth-pages)/sign-up/page.tsx b/examples/playground/app/(auth-pages)/sign-up/page.tsx deleted file mode 100644 index 3ea6672db..000000000 --- a/examples/playground/app/(auth-pages)/sign-up/page.tsx +++ /dev/null @@ -1,85 +0,0 @@ -'use client'; -export const runtime = 'edge'; - -import { signUpAction } from "@/app/actions"; -import { FormMessage, Message } from "@/components/form-message"; -import { GithubButton } from "@/components/github-button"; -import { SubmitButton } from "@/components/submit-button"; -import { Input } from "@/components/ui/input"; -import { Label } from "@/components/ui/label"; -import Link from "next/link"; -import { useState, useEffect } from 'react'; - -export default function Signup(props: { - searchParams: Promise; -}) { - const [message, setMessage] = useState(null); - const [isLoading, setIsLoading] = useState(false); - - useEffect(() => { - const processSearchParams = async () => { - try { - const resolvedParams = await props.searchParams; - setMessage(resolvedParams); - } catch (error) { - console.error('Error processing searchParams:', error); - } - }; - - processSearchParams(); - }, [props.searchParams]); - - if (message && "message" in message) { - return ( -
- -
- ); - } - - return ( - <> -
setIsLoading(true)}> -

Sign up

-

- Already have an account?{" "} - - Sign in - -

-
- -
-
- Or sign up with email -
-
- - - - - - Sign up with email - - -
-
- - ); -} diff --git a/examples/playground/app/(auth-pages)/smtp-message.tsx b/examples/playground/app/(auth-pages)/smtp-message.tsx deleted file mode 100644 index 84c21fce2..000000000 --- a/examples/playground/app/(auth-pages)/smtp-message.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import { ArrowUpRight, InfoIcon } from "lucide-react"; -import Link from "next/link"; - -export function SmtpMessage() { - return ( -
- -
- - Note: Emails are rate limited. Enable Custom SMTP to - increase the rate limit. - -
- - Learn more - -
-
-
- ); -} diff --git a/examples/playground/app/actions.ts b/examples/playground/app/actions.ts deleted file mode 100644 index b4f55c299..000000000 --- a/examples/playground/app/actions.ts +++ /dev/null @@ -1,148 +0,0 @@ -'use server'; - -import { encodedRedirect } from '@/utils/utils'; -import { createClient } from '@/utils/supabase/server'; -import { headers } from 'next/headers'; -import { redirect } from 'next/navigation'; - -export const signUpAction = async (formData: FormData) => { - const email = formData.get('email')?.toString(); - const password = formData.get('password')?.toString(); - const supabase = await createClient(); - - if (!email || !password) { - return encodedRedirect( - 'error', - '/sign-up', - 'Email and password are required', - ); - } - - // Sign up with auto-confirm enabled (skip email verification) - const { error: signUpError } = await supabase.auth.signUp({ - email, - password, - options: { - // Disable email confirmation flow - emailRedirectTo: undefined, - // Auto-confirm the user - data: { - email_confirmed: true - } - }, - }); - - if (signUpError) { - console.error(signUpError.code + ' ' + signUpError.message); - return encodedRedirect('error', '/sign-up', signUpError.message); - } - - // Automatically sign in the user after sign up - const { error: signInError } = await supabase.auth.signInWithPassword({ - email, - password, - }); - - if (signInError) { - console.error(signInError.code + ' ' + signInError.message); - return encodedRedirect('error', '/sign-up', 'Sign up successful but could not automatically sign in.'); - } - - // Redirect to home page - return redirect('/'); -}; - -export const signInAction = async (formData: FormData) => { - const email = formData.get('email') as string; - const password = formData.get('password') as string; - const supabase = await createClient(); - - const { error } = await supabase.auth.signInWithPassword({ - email, - password, - }); - - if (error) { - return encodedRedirect('error', '/sign-in', error.message); - } - - // Redirect to home page - return redirect('/'); -}; - -export const forgotPasswordAction = async (formData: FormData) => { - const email = formData.get('email')?.toString(); - const supabase = await createClient(); - const origin = (await headers()).get('origin'); - const callbackUrl = formData.get('callbackUrl')?.toString(); - - if (!email) { - return encodedRedirect('error', '/forgot-password', 'Email is required'); - } - - const { error } = await supabase.auth.resetPasswordForEmail(email, { - redirectTo: `${origin}/auth/callback?redirect_to=/protected/reset-password`, - }); - - if (error) { - console.error(error.message); - return encodedRedirect( - 'error', - '/forgot-password', - 'Could not reset password', - ); - } - - if (callbackUrl) { - return redirect(callbackUrl); - } - - return encodedRedirect( - 'success', - '/forgot-password', - 'Check your email for a link to reset your password.', - ); -}; - -export const resetPasswordAction = async (formData: FormData) => { - const supabase = await createClient(); - - const password = formData.get('password') as string; - const confirmPassword = formData.get('confirmPassword') as string; - - if (!password || !confirmPassword) { - encodedRedirect( - 'error', - '/protected/reset-password', - 'Password and confirm password are required', - ); - } - - if (password !== confirmPassword) { - encodedRedirect( - 'error', - '/protected/reset-password', - 'Passwords do not match', - ); - } - - const { error } = await supabase.auth.updateUser({ - password: password, 
- }); - - if (error) { - encodedRedirect( - 'error', - '/protected/reset-password', - 'Password update failed', - ); - } - - encodedRedirect('success', '/protected/reset-password', 'Password updated'); -}; - -export const signOutAction = async () => { - const supabase = await createClient(); - await supabase.auth.signOut(); - return redirect('/sign-in'); -}; diff --git a/examples/playground/app/auth/callback/route.ts b/examples/playground/app/auth/callback/route.ts deleted file mode 100644 index 0c32b07cd..000000000 --- a/examples/playground/app/auth/callback/route.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { createClient } from '@/utils/supabase/server'; -import { NextResponse } from 'next/server'; - -export async function GET(request: Request) { - // The `/auth/callback` route is required for the server-side auth flow implemented - // by the SSR package. It exchanges an auth code for the user's session. - // https://supabase.com/docs/guides/auth/server-side/nextjs - const requestUrl = new URL(request.url); - const code = requestUrl.searchParams.get('code'); - const origin = requestUrl.origin; - const redirectTo = requestUrl.searchParams.get('redirect_to')?.toString(); - - console.log('Auth callback - code:', code); - console.log('Auth callback - origin:', origin); - console.log('Auth callback - redirectTo:', redirectTo); - - if (code) { - const supabase = await createClient(); - const { data, error } = await supabase.auth.exchangeCodeForSession(code); - console.log('Exchange code result - data:', data); - console.log('Exchange code result - error:', error); - } - - if (redirectTo) { - return NextResponse.redirect(`${origin}${redirectTo}`); - } - - // URL to redirect to after sign up process completes - return NextResponse.redirect(`${origin}/`); -} diff --git a/examples/playground/app/globals.css b/examples/playground/app/globals.css deleted file mode 100644 index 7e4a2c8b8..000000000 --- a/examples/playground/app/globals.css +++ /dev/null @@ -1,90 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; - -@layer base { - :root { - --background: 0 0% 100%; - --foreground: 0 0% 3.9%; - --card: 0 0% 100%; - --card-foreground: 0 0% 3.9%; - --popover: 0 0% 100%; - --popover-foreground: 0 0% 3.9%; - --primary: 0 0% 9%; - --primary-foreground: 0 0% 98%; - --secondary: 0 0% 96.1%; - --secondary-foreground: 0 0% 9%; - --muted: 0 0% 96.1%; - --muted-foreground: 0 0% 45.1%; - --accent: 0 0% 96.1%; - --accent-foreground: 0 0% 9%; - --destructive: 0 84.2% 60.2%; - --destructive-foreground: 0 0% 98%; - --border: 0 0% 89.8%; - --input: 0 0% 89.8%; - --ring: 0 0% 3.9%; - --radius: 0.5rem; - --chart-1: 12 76% 61%; - --chart-2: 173 58% 39%; - --chart-3: 197 37% 24%; - --chart-4: 43 74% 66%; - --chart-5: 27 87% 67%; - } - - .dark { - --background: 0 0% 3.9%; - --foreground: 0 0% 98%; - --card: 0 0% 3.9%; - --card-foreground: 0 0% 98%; - --popover: 0 0% 3.9%; - --popover-foreground: 0 0% 98%; - --primary: 0 0% 98%; - --primary-foreground: 0 0% 9%; - --secondary: 0 0% 14.9%; - --secondary-foreground: 0 0% 98%; - --muted: 0 0% 14.9%; - --muted-foreground: 0 0% 63.9%; - --accent: 0 0% 14.9%; - --accent-foreground: 0 0% 98%; - --destructive: 0 62.8% 30.6%; - --destructive-foreground: 0 0% 98%; - --border: 0 0% 14.9%; - --input: 0 0% 14.9%; - --ring: 0 0% 83.1%; - --chart-1: 220 70% 50%; - --chart-2: 160 60% 45%; - --chart-3: 30 80% 55%; - --chart-4: 280 65% 60%; - --chart-5: 340 75% 55%; - } -} - -@layer base { - * { - @apply border-border; - } - - body { - @apply bg-background text-foreground; - 
} -} - -/********** CUSTOM OVERRIDES **********/ - -@keyframes breathe { - 0% { - opacity: 0.4; - } - - 50% { - opacity: 1; - } - - 100% { - opacity: 0.4; - } -} - -.breathing { - animation: breathe 2s infinite ease-in-out; -} diff --git a/examples/playground/app/layout.tsx b/examples/playground/app/layout.tsx deleted file mode 100644 index 7fd446c14..000000000 --- a/examples/playground/app/layout.tsx +++ /dev/null @@ -1,157 +0,0 @@ -import { EnvVarWarning } from '@/components/env-var-warning'; -import HeaderAuth from '@/components/header-auth'; -import { LoadingStateProvider } from '@/components/loading-state-provider'; -import { PgflowClientProvider } from '@/lib/pgflow-client-provider'; -import { MobileLinks } from '@/components/mobile-links'; -import { ThemeSwitcher } from '@/components/theme-switcher'; -import { hasEnvVars } from '@/utils/supabase/check-env-vars'; -import { Geist } from 'next/font/google'; -import { ThemeProvider } from 'next-themes'; -import Link from 'next/link'; -import { - BookOpen, - Github, - AlertCircle, - Twitter, - MessagesSquare, -} from 'lucide-react'; -import './globals.css'; - -// import PlausibleProvider from 'next-plausible'; - -const defaultUrl = process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}` - : 'http://localhost:3000'; - -export const metadata = { - metadataBase: new URL(defaultUrl), - title: 'pgflow', - description: 'Place to test and understand how pgflow works.', -}; - -const geistSans = Geist({ - display: 'swap', - subsets: ['latin'], -}); - -export default function RootLayout({ - children, -}: Readonly<{ - children: React.ReactNode; -}>) { - return ( - - - {/* */} - - - -
-
- -
- {children} -
- - -
-
-
-
-
- {/*
*/} - - - ); -} diff --git a/examples/playground/app/page.tsx b/examples/playground/app/page.tsx deleted file mode 100644 index 493c2814e..000000000 --- a/examples/playground/app/page.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import WebsiteAnalyzerForm from '@/components/website-analyzer-form'; -import ExampleLinks from '@/components/example-links'; - -export const runtime = 'edge'; - -export default function Home() { - - return ( - <> -
-
- -
-

How it works

-

- Enter a URL in the form to analyze a website. Our tool will scrape - it then use AI to summarize it and extract tags. -

- -
-

- Example Websites to Analyze -

- -
-
-
-
- - ); -} diff --git a/examples/playground/app/protected/page.tsx b/examples/playground/app/protected/page.tsx deleted file mode 100644 index 5508abab3..000000000 --- a/examples/playground/app/protected/page.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import FetchDataSteps from "@/components/tutorial/fetch-data-steps"; -import { createClient } from "@/utils/supabase/server"; -import { InfoIcon } from "lucide-react"; -import { redirect } from "next/navigation"; - -export default async function ProtectedPage() { - const supabase = await createClient(); - - const { - data: { user }, - } = await supabase.auth.getUser(); - - if (!user) { - return redirect("/sign-in"); - } - - return ( -
-
-
- - This is a protected page that you can only see as an authenticated - user -
-
-
-

Your user details

-
-          {JSON.stringify(user, null, 2)}
-        
-
-
-

Next steps

- -
-
- ); -} diff --git a/examples/playground/app/protected/reset-password/page.tsx b/examples/playground/app/protected/reset-password/page.tsx deleted file mode 100644 index 1eb8fa55c..000000000 --- a/examples/playground/app/protected/reset-password/page.tsx +++ /dev/null @@ -1,37 +0,0 @@ -import { resetPasswordAction } from '@/app/actions'; -import { FormMessage, Message } from '@/components/form-message'; -import { SubmitButton } from '@/components/submit-button'; -import { Input } from '@/components/ui/input'; -import { Label } from '@/components/ui/label'; - -export default async function ResetPassword(props: { - searchParams: Promise; -}) { - const searchParams = await props.searchParams; - return ( -
-

Reset password

-

- Please enter your new password below. -

- - - - - - Reset password - - - - ); -} diff --git a/examples/playground/app/websites/page.tsx b/examples/playground/app/websites/page.tsx deleted file mode 100644 index d61b45bdc..000000000 --- a/examples/playground/app/websites/page.tsx +++ /dev/null @@ -1,132 +0,0 @@ -'use client'; - -import { createClient } from '@/utils/supabase/client'; -import { useEffect, useState } from 'react'; -import { useStartAnalysis } from '@/lib/hooks/use-start-analysis'; -import type { Database } from '@/supabase/functions/database-types'; -import { Label } from '@/components/ui/label'; -import { Input } from '@/components/ui/input'; -import { FormMessage } from '@/components/form-message'; -import { SubmitButton } from '@/components/submit-button'; -import { SkeletonTable } from '@/components/skeleton-table'; - -type WebsiteRow = Database['public']['Tables']['websites']['Row']; - -export default function Page() { - const [websites, setWebsites] = useState(null); - const [url, setUrl] = useState('https://reddit.com/r/supabase'); - const { start: startAnalysis, isPending, error: formError } = useStartAnalysis(); - const supabase = createClient(); - - // Process URL parameter when the component mounts - useEffect(() => { - const getData = async () => { - const { data } = await supabase.from('websites').select(); - setWebsites(data); - }; - getData(); - - // Check for URL parameter and start analysis if present - if (typeof window !== 'undefined') { - const urlParams = new URLSearchParams(window.location.search); - const urlParam = urlParams.get('url'); - - if (urlParam) { - setUrl(urlParam); - console.log("Found URL parameter, starting analysis:", urlParam); - - // Use a tiny delay to ensure we're already in the client-side - // This helps prevent the page from fully rendering before redirecting - setTimeout(() => { - startAnalysis(urlParam); - }, 10); - } - } - }, [startAnalysis]); - - async function startAnalyzeWebsiteFlow(formData: FormData) { - const url = formData.get('url') as string; - if (url) { - startAnalysis(url); - } - } - - return ( -
-
-
-
-

Analyze a Website

-

- Enter a URL to analyze a website -

-
- - setUrl(e.target.value)} - required - disabled={isPending} - /> -
- - Start Analysis - - {formError && } - -
- -
-

Your Websites

- {websites === null ? ( - - ) : websites.length > 0 ? ( -
-
- - - - - - - - - - {websites.map((website) => ( - - - - - - ))} - -
IDURLCreated At
{website.id}{website.website_url} - {new Date(website.created_at).toLocaleString()} -
-
-
- ) : ( -

No websites analyzed yet

- )} - -
- - View Raw Data - -
-              {JSON.stringify(websites, null, 2)}
-            
-
-
-
-
- ); -} diff --git a/examples/playground/app/websites/runs/[run_id]/page.tsx b/examples/playground/app/websites/runs/[run_id]/page.tsx deleted file mode 100644 index ee0d822de..000000000 --- a/examples/playground/app/websites/runs/[run_id]/page.tsx +++ /dev/null @@ -1,153 +0,0 @@ -'use client'; - -import { useParams } from 'next/navigation'; -import { FlowRunProvider, useFlowRun } from '@/components/flow-run-provider'; -import FlowRunDetails from '@/components/flow-run-details'; -import WebsiteAnalysisUI from '@/components/website-analysis-ui'; -import { useState } from 'react'; - -// Component that uses the shared context -function RunPageContent() { - const [isPinned, setIsPinned] = useState(() => { - // Initialize from localStorage if available (client-side only) - if (typeof window !== 'undefined') { - const saved = localStorage.getItem('techDetailsPinned'); - return saved !== null ? saved === 'true' : true; // Default to true if not set - } - return true; // Default to true - }); - - const { flowRun, loading, error } = useFlowRun(); - - return ( -
- {/* Debug panel: Technical details - first on mobile, right side on desktop */} -
-
- {/* Pin button for keeping sidebar visible - moved to right side */} -
{ - e.stopPropagation(); - const newPinned = !isPinned; - setIsPinned(newPinned); - // Save to localStorage - if (typeof window !== 'undefined') { - localStorage.setItem('techDetailsPinned', newPinned.toString()); - } - }} - > - - {isPinned ? "Panel always visible" : "Always show this panel"} - -
- {isPinned && ( - - - - )} -
-
- - {/* Interactive hover hint overlay - only shown when not pinned */} - {!isPinned && ( - <> -
- - {/* Large centered hint */} -
-
- - - - - -

- Hover to reveal -

-
- - {/* Pulsing animation to draw attention */} -
-
- - {/* Floating hint badge at bottom right */} -
- - - - - - - Hover to reveal -
- - )} - - -
-
- - {/* Main panel: User-friendly UI - second on mobile, left side on desktop */} -
- -
-
- ); -} - -export default function RunPage() { - const params = useParams(); - const runId = params.run_id as string; - - - return ( - - - - ); -} \ No newline at end of file diff --git a/examples/playground/app/websites/runs/layout.tsx b/examples/playground/app/websites/runs/layout.tsx deleted file mode 100644 index 7174ac1b0..000000000 --- a/examples/playground/app/websites/runs/layout.tsx +++ /dev/null @@ -1,7 +0,0 @@ -export default function RunsLayout({ - children, -}: { - children: React.ReactNode; -}) { - return
{children}
; -} diff --git a/examples/playground/app/websites/runs/page.tsx b/examples/playground/app/websites/runs/page.tsx deleted file mode 100644 index aca8b97e8..000000000 --- a/examples/playground/app/websites/runs/page.tsx +++ /dev/null @@ -1,120 +0,0 @@ -'use client'; - -import { createClient } from '@/utils/supabase/client'; -import { useEffect, useState } from 'react'; -import Link from 'next/link'; - -import { Database } from '@/supabase/functions/database-types'; - -type RunRow = Database['pgflow']['Tables']['runs']['Row']; - -export default function RunsListPage() { - const [runs, setRuns] = useState([]); - const [loading, setLoading] = useState(true); - const [error, setError] = useState(null); - const supabase = createClient(); - - useEffect(() => { - const fetchRuns = async () => { - try { - setLoading(true); - - const { data, error } = await supabase - .schema('pgflow') - .from('runs') - .select('*'); - - if (error) { - setError(`Error fetching runs: ${error.message}`); - return; - } - - setRuns(data || []); - } catch (err) { - console.error('Error fetching runs:', err); - setError('An error occurred while fetching runs'); - } finally { - setLoading(false); - } - }; - - fetchRuns(); - }, [supabase]); - - if (loading) { - return ( -
-
-
-
-

Loading runs...

-
-
-
- ); - } - - if (error) { - return ( -
-
-

Error

-

{error}

-
-
- ); - } - - return ( -
-

Website Analysis Runs

- - {runs.length === 0 ? ( -
-

No runs found

- - Analyze a website - -
- ) : ( -
- {runs.map((run) => ( - -
-
-

{run.flow_slug}

-

- {new Date(run.started_at).toLocaleString()} -

-
-
- - - {run.status || 'unknown'} - -
-
- - ))} -
- )} -
- ); -} diff --git a/examples/playground/components.json b/examples/playground/components.json deleted file mode 100644 index ec9676bf2..000000000 --- a/examples/playground/components.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "$schema": "https://ui.shadcn.com/schema.json", - "style": "default", - "rsc": true, - "tsx": true, - "tailwind": { - "config": "tailwind.config.ts", - "css": "app/globals.css", - "baseColor": "neutral", - "cssVariables": true, - "prefix": "" - }, - "aliases": { - "components": "@/components", - "utils": "@/lib/utils" - } -} diff --git a/examples/playground/components/deploy-button.tsx b/examples/playground/components/deploy-button.tsx deleted file mode 100644 index 8a5a19209..000000000 --- a/examples/playground/components/deploy-button.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import Link from "next/link"; -import { Button } from "./ui/button"; - -export default function DeployButton() { - return ( - <> - - - - - ); -} diff --git a/examples/playground/components/env-var-warning.tsx b/examples/playground/components/env-var-warning.tsx deleted file mode 100644 index b6a193f6c..000000000 --- a/examples/playground/components/env-var-warning.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import Link from "next/link"; -import { Badge } from "./ui/badge"; -import { Button } from "./ui/button"; - -export function EnvVarWarning() { - return ( -
- - Supabase environment variables required - -
- - -
-
- ); -} diff --git a/examples/playground/components/example-links.tsx b/examples/playground/components/example-links.tsx deleted file mode 100644 index 65a43a7d6..000000000 --- a/examples/playground/components/example-links.tsx +++ /dev/null @@ -1,44 +0,0 @@ -'use client'; - -import { useStartAnalysis } from '@/lib/hooks/use-start-analysis'; -import { exampleLinks } from '@/lib/example-links'; -import { useLoadingState } from './loading-state-provider'; - -export default function ExampleLinks() { - const { start, isPending } = useStartAnalysis(); - const { setLoading } = useLoadingState(); - - // Function to handle example link clicks - const handleExampleClick = (e: React.MouseEvent, url: string) => { - e.preventDefault(); - if (isPending) return; - - // Set global loading state to true - setLoading(true); - - // Start analysis will handle auth check and redirect - start(url); - }; - - return ( - - ); -} \ No newline at end of file diff --git a/examples/playground/components/flow-run-details.tsx b/examples/playground/components/flow-run-details.tsx deleted file mode 100644 index 43e5ae8ce..000000000 --- a/examples/playground/components/flow-run-details.tsx +++ /dev/null @@ -1,182 +0,0 @@ -'use client'; - -import { useEffect, useState } from 'react'; -import type { FlowRun } from '@pgflow/client'; -import type { AnyFlow } from '@pgflow/dsl'; -import { - Collapsible, - CollapsibleContent, - CollapsibleTrigger, -} from '@/components/ui/collapsible'; -import JSONHighlighter from '@/components/json-highlighter'; -import { FormMessage } from '@/components/form-message'; -import { useElapsedTime } from '@/lib/hooks/use-elapsed-time'; - -// Format time difference in a concise way (e.g., "5s", "3m 45s", "2h 15m") -function formatTimeDifference( - startDate: Date | null, - endDate: Date | null, -): string { - if (!startDate) return ''; - - const start = startDate; - const end = endDate || new Date(); - - const diffMs = end.getTime() - start.getTime(); - const diffSec = Math.floor(diffMs / 1000); - - if (diffSec < 1) { - return '< 1s'; - } - - if (diffSec < 60) { - return `${diffSec}s`; - } - - const minutes = Math.floor(diffSec / 60); - const seconds = diffSec % 60; - - if (minutes < 60) { - return seconds > 0 ? `${minutes}m ${seconds}s` : `${minutes}m`; - } - - const hours = Math.floor(minutes / 60); - const remainingMinutes = minutes % 60; - - return remainingMinutes > 0 ? `${hours}h ${remainingMinutes}m` : `${hours}h`; -} - - -interface FlowRunDetailsProps { - runId: string; - flowRun: FlowRun | null; - loading: boolean; - error: string | null; -} - -export default function FlowRunDetails({ - runId, - flowRun, - loading, - error, -}: FlowRunDetailsProps) { - const [, setRefresh] = useState(0); - const elapsedTimeRef = useElapsedTime(flowRun?.started_at || null); - - useEffect(() => { - if (!flowRun) return; - - // Subscribe to all run events to trigger re-renders - const unsubscribeRun = flowRun.on('*', () => { - setRefresh(prev => prev + 1); - }); - - return () => { - unsubscribeRun(); - }; - }, [flowRun]); - - if (loading) { - return ( -
-
Loading flow run...
-
- ); - } - - if (error) { - return ( -
- -
- ); - } - - if (!flowRun) { - return ( -
-
No flow run found
-
- ); - } - - return ( -
-
-
-

Technical Details

- - {/* Run Info */} -
-
- Run ID: - {runId} -
-
- Status: - - {flowRun.status} - -
-
- Remaining Steps: - {flowRun.remaining_steps} -
- {flowRun.started_at && ( -
- Started: - -
- )} - {flowRun.completed_at && ( -
- Duration: - {formatTimeDifference(flowRun.started_at, flowRun.completed_at)} -
- )} -
-
- - {/* Flow Input/Output */} - {flowRun.input && ( - - -
- Flow Input -
-
- -
-
- -
-
-
-
- )} - - {flowRun.output && ( - - -
- Flow Output -
-
- -
-
- -
-
-
-
- )} -
-
- ); -} \ No newline at end of file diff --git a/examples/playground/components/flow-run-provider.tsx b/examples/playground/components/flow-run-provider.tsx deleted file mode 100644 index 81904337a..000000000 --- a/examples/playground/components/flow-run-provider.tsx +++ /dev/null @@ -1,121 +0,0 @@ -'use client'; - -import { createContext, useContext, useEffect, useState } from 'react'; -import { usePgflowClient } from '@/lib/pgflow-client-provider'; -import { useLoadingState } from './loading-state-provider'; -import type { FlowRun } from '@pgflow/client'; -import type { AnyFlow } from '@pgflow/dsl'; - -interface FlowRunContextType { - flowRun: FlowRun | null; - loading: boolean; - error: string | null; -} - -const FlowRunContext = createContext({ - flowRun: null, - loading: true, - error: null, -}); - -export const useFlowRun = () => useContext(FlowRunContext); - -interface FlowRunProviderProps { - runId: string; - children: React.ReactNode; -} - -export function FlowRunProvider({ runId, children }: FlowRunProviderProps) { - const [flowRun, setFlowRun] = useState | null>(null); - const [loading, setLoading] = useState(true); - const [error, setError] = useState(null); - - const { setLoading: setGlobalLoading } = useLoadingState(); - const pgflow = usePgflowClient(); - - useEffect(() => { - if (!runId) return; - - setLoading(true); - // Set global loading state to true when initially loading run data - setGlobalLoading(true); - - // Track if this effect is still mounted - let isMounted = true; - - // Load the flow run - const loadFlowRun = async () => { - try { - - // Just call getRun - the PgflowClient will return cached instance if exists - const run = await pgflow.getRun(runId); - - if (!run) { - if (isMounted) { - setError('Flow run not found'); - setGlobalLoading(false); - setLoading(false); - } - return; - } - - if (!isMounted) return; - - setFlowRun(run); - - // Subscribe to all run events to update global loading - const unsubscribeStatus = run.on('*', (event) => { - if (event.status === 'completed' || event.status === 'failed') { - setGlobalLoading(false); - } - }); - - // Check initial status - const currentStatus = run.status; - if (currentStatus === 'completed' || currentStatus === 'failed') { - setGlobalLoading(false); - } - - if (isMounted) { - setLoading(false); - } - - // Return cleanup function - return () => { - unsubscribeStatus(); - // Don't dispose the run - let PgflowClient manage its cache - // pgflow.dispose(runId); - }; - } catch (err) { - if (isMounted) { - setError(err instanceof Error ? err.message : 'Failed to load flow run'); - setGlobalLoading(false); - setLoading(false); - } - return undefined; - } - }; - - let cleanup: (() => void) | undefined; - loadFlowRun().then(cleanupFn => { - if (isMounted) { - cleanup = cleanupFn; - } - }); - - return () => { - isMounted = false; - cleanup?.(); - }; - }, [runId, setGlobalLoading, pgflow]); - - const value = { - flowRun, - loading, - error, - }; - - return ( - {children} - ); -} \ No newline at end of file diff --git a/examples/playground/components/form-message.tsx b/examples/playground/components/form-message.tsx deleted file mode 100644 index 9917a814f..000000000 --- a/examples/playground/components/form-message.tsx +++ /dev/null @@ -1,26 +0,0 @@ -export type Message = - | { success: string } - | { error: string } - | { message: string }; - -export function FormMessage({ message }: { message: Message | null }) { - if (!message) return null; - - return ( -
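For orientation, a minimal consumer of the FlowRunProvider above could look like the following. This is a hypothetical page component, not part of the deleted playground; it relies only on the FlowRunProvider and useFlowRun exports shown in flow-run-provider.tsx:

  'use client';

  import { FlowRunProvider, useFlowRun } from '@/components/flow-run-provider';

  // Reads the run that FlowRunProvider resolved via PgflowClient.getRun()
  function RunStatus() {
    const { flowRun, loading, error } = useFlowRun();
    if (loading) return <p>Loading run...</p>;
    if (error) return <p>{error}</p>;
    return <p>{flowRun?.run_id}: {flowRun?.status}</p>;
  }

  export default function RunPage({ runId }: { runId: string }) {
    return (
      <FlowRunProvider runId={runId}>
        <RunStatus />
      </FlowRunProvider>
    );
  }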
- {'success' in message && ( -
- {message.success} -
- )} - {'error' in message && ( -
- {message.error} -
- )} - {'message' in message && ( -
{message.message}
- )} -
- ); -} diff --git a/examples/playground/components/github-button.tsx b/examples/playground/components/github-button.tsx deleted file mode 100644 index 5346ed858..000000000 --- a/examples/playground/components/github-button.tsx +++ /dev/null @@ -1,67 +0,0 @@ -'use client'; - -import { Button } from '@/components/ui/button'; -import { createClient } from '@/utils/supabase/client'; -import { useState } from 'react'; - -interface GithubButtonProps { - className?: string; - onLoadingChange?: (isLoading: boolean) => void; - disabled?: boolean; - text?: string; -} - -export function GithubButton({ className = '', onLoadingChange, disabled, text = 'Sign in with GitHub' }: GithubButtonProps) { - const [isLoadingLocal, setIsLoadingLocal] = useState(false); - - const handleGithubSignIn = async () => { - try { - setIsLoadingLocal(true); - onLoadingChange?.(true); - const supabase = createClient(); - const { data, error } = await supabase.auth.signInWithOAuth({ - provider: 'github', - options: { - redirectTo: `${window.location.origin}/auth/callback`, - }, - }); - - if (error) { - console.error('GitHub OAuth error:', error); - setIsLoadingLocal(false); - onLoadingChange?.(false); - return; - } - - // Redirect to the OAuth URL - if (data?.url) { - window.location.href = data.url; - } - } catch (error) { - console.error('GitHub sign-in error:', error); - setIsLoadingLocal(false); - onLoadingChange?.(false); - } - }; - - return ( - - ); -} \ No newline at end of file diff --git a/examples/playground/components/header-auth.tsx b/examples/playground/components/header-auth.tsx deleted file mode 100644 index e0ac584da..000000000 --- a/examples/playground/components/header-auth.tsx +++ /dev/null @@ -1,103 +0,0 @@ -'use client' - -import { signOutAction } from "@/app/actions"; -import { hasEnvVars } from "@/utils/supabase/check-env-vars"; -import Link from "next/link"; -import { Badge } from "./ui/badge"; -import { Button } from "./ui/button"; -import { createClient } from "@/utils/supabase/client"; -import { SpinnerWrapper } from "./spinner-wrapper"; -import { useEffect, useState } from "react"; -import type { User } from "@supabase/supabase-js"; - -export default function AuthButton() { - const [user, setUser] = useState(null); - const [checking, setChecking] = useState(true); - - useEffect(() => { - const supabase = createClient(); - - // Initial auth check - supabase.auth.getUser().then(({ data, error }) => { - console.log('Auth check - user:', data.user); - console.log('Auth check - error:', error); - setUser(data.user ?? null); - setChecking(false); - }); - - // Listen for auth state changes - const { data: { subscription } } = supabase.auth.onAuthStateChange((_event, session) => { - console.log('Auth state changed:', _event, session?.user); - setUser(session?.user ?? null); - }); - - return () => subscription.unsubscribe(); - }, []); - - if (!hasEnvVars) { - return ( - <> -
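The redirectTo above sends GitHub back to /auth/callback. That route is not part of this diff; a typical Next.js handler for it, sketched here assuming the playground's @/utils/supabase/server factory referenced elsewhere in this diff, exchanges the OAuth code for a session:

  // app/auth/callback/route.ts: hypothetical handler sketch
  import { NextResponse } from 'next/server';
  import { createClient } from '@/utils/supabase/server';

  export async function GET(request: Request) {
    const { searchParams, origin } = new URL(request.url);
    const code = searchParams.get('code');
    if (code) {
      const supabase = await createClient();
      // Completes the PKCE flow and sets the session cookie
      await supabase.auth.exchangeCodeForSession(code);
    }
    return NextResponse.redirect(origin);
  }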
-
- - Please update .env.local file with anon key and url - -
-
- - -
-
- - ); - } - - if (checking) { - return ; - } - - return user ? ( -
- -
- Hey, {user.email}! -
-
- -
-
- ) : ( -
- -
- - -
-
- ); -} diff --git a/examples/playground/components/hero.tsx b/examples/playground/components/hero.tsx deleted file mode 100644 index e7815d1a0..000000000 --- a/examples/playground/components/hero.tsx +++ /dev/null @@ -1,44 +0,0 @@ -import NextLogo from './next-logo'; -import SupabaseLogo from './supabase-logo'; - -export default function Header() { - return ( -
-
- - - - - - - -
-

Supabase and Next.js Starter Template

-

- The fastest way to build apps with{' '} - - Supabase - {' '} - and{' '} - - Next.js - -

-
-
- ); -} diff --git a/examples/playground/components/json-highlighter.tsx b/examples/playground/components/json-highlighter.tsx deleted file mode 100644 index e82093980..000000000 --- a/examples/playground/components/json-highlighter.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import { Json } from '@/supabase/functions/database-types'; - -export default function JSONHighlighter({ data }: { data: Json }) { - const jsonString = JSON.stringify(data, null, 2) - .replace(/&/g, '&') - .replace(//g, '>') - .replace( - /("(\\u[\dA-Fa-f]{4}|\\[^u]|[^\\"])*"|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?)/g, - (match) => { - let className = 'text-yellow-500'; // number - using yellow-500 (#eab308) - if (/^"/.test(match)) { - className = match.endsWith(':') - ? 'text-blue-500' // key - using blue-500 (#3b82f6) - : 'text-green-500'; // string - using green-500 (#22c55e) - } else if (/true|false/.test(match)) { - className = 'text-blue-500'; // boolean - using blue-500 (#3b82f6) - } else if (/null/.test(match)) { - className = 'text-red-500'; // null - using red-500 (#ef4444) - } - return `${match}`; - }, - ); - - return ( -
-      
-    
- ); -} diff --git a/examples/playground/components/loading-state-provider.tsx b/examples/playground/components/loading-state-provider.tsx deleted file mode 100644 index e9bafcf5e..000000000 --- a/examples/playground/components/loading-state-provider.tsx +++ /dev/null @@ -1,36 +0,0 @@ -'use client'; - -import { createContext, useContext, useState, ReactNode, useEffect, useCallback } from 'react'; -import { usePathname } from 'next/navigation'; - -interface LoadingState { - isLoading: boolean; - setLoading: (loading: boolean) => void; -} - -const LoadingStateContext = createContext({ - isLoading: false, - setLoading: () => { /* Default implementation */ }, -}); - -export const useLoadingState = () => useContext(LoadingStateContext); - -export function LoadingStateProvider({ children }: { children: ReactNode }) { - const [isLoading, setIsLoading] = useState(false); - const pathname = usePathname(); - - // Reset loading state when pathname changes - useEffect(() => { - setIsLoading(false); - }, [pathname]); - - const setLoading = useCallback((loading: boolean) => { - setIsLoading(loading); - }, []); - - return ( - - {children} - - ); -} \ No newline at end of file diff --git a/examples/playground/components/mobile-links.tsx b/examples/playground/components/mobile-links.tsx deleted file mode 100644 index fc6554e53..000000000 --- a/examples/playground/components/mobile-links.tsx +++ /dev/null @@ -1,83 +0,0 @@ -'use client'; - -import { useState } from 'react'; -import { BookOpen, Github, Twitter, AlertCircle, MessagesSquare, MoreHorizontal } from 'lucide-react'; - -export function MobileLinks() { - const [isOpen, setIsOpen] = useState(false); - - return ( -
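A sketch of how a client component drives this global spinner (hypothetical component; useLoadingState is the hook exported above, and the provider clears isLoading automatically whenever the pathname changes):

  'use client';

  import { useLoadingState } from '@/components/loading-state-provider';

  export function AnalyzeTrigger({ onAnalyze }: { onAnalyze: () => void }) {
    const { setLoading } = useLoadingState();
    return (
      <button
        onClick={() => {
          setLoading(true); // spinner stays up until navigation resets it
          onAnalyze();
        }}
      >
        Analyze
      </button>
    );
  }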
- - - {isOpen && ( - - )} -
- ); -} \ No newline at end of file diff --git a/examples/playground/components/next-logo.tsx b/examples/playground/components/next-logo.tsx deleted file mode 100644 index 165558213..000000000 --- a/examples/playground/components/next-logo.tsx +++ /dev/null @@ -1,46 +0,0 @@ -export default function NextLogo() { - return ( - - - - - - - - - - - ); -} diff --git a/examples/playground/components/skeleton-table.tsx b/examples/playground/components/skeleton-table.tsx deleted file mode 100644 index 60cfb6bc3..000000000 --- a/examples/playground/components/skeleton-table.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import { Skeleton } from "@/components/ui/skeleton"; - -export function SkeletonTable() { - return ( -
-
- - - - - - - - - - {[...Array(3)].map((_, i) => ( - - - - - - ))} - -
- - - - - -
- - - - - -
-
-
- ); -} \ No newline at end of file diff --git a/examples/playground/components/spinner-wrapper.tsx b/examples/playground/components/spinner-wrapper.tsx deleted file mode 100644 index 7643b53c9..000000000 --- a/examples/playground/components/spinner-wrapper.tsx +++ /dev/null @@ -1,7 +0,0 @@ -'use client'; - -import { Spinner } from './spinner'; - -export function SpinnerWrapper() { - return ; -} \ No newline at end of file diff --git a/examples/playground/components/spinner.tsx b/examples/playground/components/spinner.tsx deleted file mode 100644 index 8e8bd3a27..000000000 --- a/examples/playground/components/spinner.tsx +++ /dev/null @@ -1,19 +0,0 @@ -'use client'; - -import { useLoadingState } from './loading-state-provider'; - -export function Spinner() { - const { isLoading } = useLoadingState(); - - if (!isLoading) return null; - - return ( -
-
- Processing... -
- ); -} \ No newline at end of file diff --git a/examples/playground/components/submit-button.tsx b/examples/playground/components/submit-button.tsx deleted file mode 100644 index 23ba0f175..000000000 --- a/examples/playground/components/submit-button.tsx +++ /dev/null @@ -1,35 +0,0 @@ -"use client"; - -import { Button } from "@/components/ui/button"; -import { type ComponentProps } from "react"; -import { useFormStatus } from "react-dom"; - -type Props = ComponentProps & { - pendingText?: string; -}; - -export function SubmitButton({ - children, - pendingText = "Submitting...", - ...props -}: Props) { - const { pending } = useFormStatus(); - - return ( - - ); -} diff --git a/examples/playground/components/supabase-logo.tsx b/examples/playground/components/supabase-logo.tsx deleted file mode 100644 index 96a56a54a..000000000 --- a/examples/playground/components/supabase-logo.tsx +++ /dev/null @@ -1,102 +0,0 @@ -export default function SupabaseLogo() { - return ( - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ); -} diff --git a/examples/playground/components/theme-switcher.tsx b/examples/playground/components/theme-switcher.tsx deleted file mode 100644 index d838e4042..000000000 --- a/examples/playground/components/theme-switcher.tsx +++ /dev/null @@ -1,78 +0,0 @@ -"use client"; - -import { Button } from "@/components/ui/button"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuRadioGroup, - DropdownMenuRadioItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { Laptop, Moon, Sun } from "lucide-react"; -import { useTheme } from "next-themes"; -import { useEffect, useState } from "react"; - -const ThemeSwitcher = () => { - const [mounted, setMounted] = useState(false); - const { theme, setTheme } = useTheme(); - - // useEffect only runs on the client, so now we can safely show the UI - useEffect(() => { - setMounted(true); - }, []); - - if (!mounted) { - return null; - } - - const ICON_SIZE = 16; - - return ( - - - - - - setTheme(e)} - > - - {" "} - Light - - - {" "} - Dark - - - {" "} - System - - - - - ); -}; - -export { ThemeSwitcher }; diff --git a/examples/playground/components/tutorial/code-block.tsx b/examples/playground/components/tutorial/code-block.tsx deleted file mode 100644 index 9f1b13d64..000000000 --- a/examples/playground/components/tutorial/code-block.tsx +++ /dev/null @@ -1,61 +0,0 @@ -"use client"; - -import { useState } from "react"; -import { Button } from "../ui/button"; - -const CopyIcon = () => ( - - - - -); - -const CheckIcon = () => ( - - - -); - -export function CodeBlock({ code }: { code: string }) { - const [icon, setIcon] = useState(CopyIcon); - - const copy = async () => { - await navigator?.clipboard?.writeText(code); - setIcon(CheckIcon); - setTimeout(() => setIcon(CopyIcon), 2000); - }; - - return ( -
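Because useFormStatus only reports the status of an enclosing form, the SubmitButton above must be rendered inside one. A minimal sketch, with a hypothetical form and server action:

  import { SubmitButton } from '@/components/submit-button';

  export default function AnalyzeForm() {
    async function analyzeAction(formData: FormData) {
      'use server';
      // hypothetical action body: kick off the analysis here
    }

    return (
      <form action={analyzeAction}>
        <input name="url" type="url" required />
        <SubmitButton pendingText="Analyzing...">Analyze</SubmitButton>
      </form>
    );
  }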
-      
-      {code}
-    
- ); -} diff --git a/examples/playground/components/tutorial/connect-supabase-steps.tsx b/examples/playground/components/tutorial/connect-supabase-steps.tsx deleted file mode 100644 index 04ca37f0e..000000000 --- a/examples/playground/components/tutorial/connect-supabase-steps.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { TutorialStep } from "./tutorial-step"; - -export default function ConnectSupabaseSteps() { - return ( -
    - -

    - Head over to{" "} - - database.new - {" "} - and create a new Supabase project. -

    -
    - - -

    - Rename the{" "} - - .env.example - {" "} - file in your Next.js app to{" "} - - .env.local - {" "} - and populate with values from{" "} - - your Supabase project's API Settings - - . -

    -
    - - -

    - You may need to quit your Next.js development server and run{" "} - - npm run dev - {" "} - again to load the new environment variables. -

    -
    - - -

    - You may need to refresh the page for Next.js to load the new - environment variables. -

    -
    -
- ); -} diff --git a/examples/playground/components/tutorial/fetch-data-steps.tsx b/examples/playground/components/tutorial/fetch-data-steps.tsx deleted file mode 100644 index 23c7df52d..000000000 --- a/examples/playground/components/tutorial/fetch-data-steps.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import { TutorialStep } from "./tutorial-step"; -import { CodeBlock } from "./code-block"; - -const create = `create table notes ( - id bigserial primary key, - title text -); - -insert into notes(title) -values - ('Today I created a Supabase project.'), - ('I added some data and queried it from Next.js.'), - ('It was awesome!'); -`.trim(); - -const server = `import { createClient } from '@/utils/supabase/server' - -export default async function Page() { - const supabase = await createClient() - const { data: notes } = await supabase.from('notes').select() - - return
<pre>{JSON.stringify(notes, null, 2)}</pre>
-} -`.trim(); - -const client = `'use client' - -import { createClient } from '@/utils/supabase/client' -import { useEffect, useState } from 'react' - -export default function Page() { - const [notes, setNotes] = useState(null) - const supabase = createClient() - - useEffect(() => { - const getData = async () => { - const { data } = await supabase.from('notes').select() - setNotes(data) - } - getData() - }, []) - - return
<pre>{JSON.stringify(notes, null, 2)}</pre>
-} -`.trim(); - -export default function FetchDataSteps() { - return ( -
    - -

    - Head over to the{" "} - - Table Editor - {" "} - for your Supabase project to create a table and insert some example - data. If you're stuck for creativity, you can copy and paste the - following into the{" "} - - SQL Editor - {" "} - and click RUN! -

    - -
    - - -

    - To create a Supabase client and query data from an Async Server - Component, create a new page.tsx file at{" "} - - /app/notes/page.tsx - {" "} - and add the following. -

    - -

    Alternatively, you can use a Client Component.

    - -
    - - -

    You're ready to launch your product to the world! 🚀

    -
    -
- ); -} diff --git a/examples/playground/components/tutorial/sign-up-user-steps.tsx b/examples/playground/components/tutorial/sign-up-user-steps.tsx deleted file mode 100644 index c00fb66e3..000000000 --- a/examples/playground/components/tutorial/sign-up-user-steps.tsx +++ /dev/null @@ -1,88 +0,0 @@ -import Link from "next/link"; -import { TutorialStep } from "./tutorial-step"; -import { ArrowUpRight } from "lucide-react"; - -export default function SignUpUserSteps() { - return ( -
    - {process.env.VERCEL_ENV === "preview" || - process.env.VERCEL_ENV === "production" ? ( - -

    It looks like this app is hosted on Vercel.

    -

    - This particular deployment is - - "{process.env.VERCEL_ENV}" - {" "} - on - - https://{process.env.VERCEL_URL} - - . -

    -

    - You will need to{" "} - - update your Supabase project - {" "} - with redirect URLs based on your Vercel deployment URLs. -

    -
      -
    • - -{" "} - - http://localhost:3000/** - -
    • -
    • - -{" "} - - {`https://${process.env.VERCEL_PROJECT_PRODUCTION_URL}/**`} - -
    • -
    • - -{" "} - - {`https://${process.env.VERCEL_PROJECT_PRODUCTION_URL?.replace(".vercel.app", "")}-*-[vercel-team-url].vercel.app/**`} - {" "} - (Vercel Team URL can be found in{" "} - - Vercel Team settings - - ) -
    • -
    - - Redirect URLs Docs - -
    - ) : null} - -

    - Head over to the{" "} - - Sign up - {" "} - page and sign up your first user. It's okay if this is just you for - now. Your awesome idea will have plenty of users later! -

    -
    -
- ); -} diff --git a/examples/playground/components/tutorial/tutorial-step.tsx b/examples/playground/components/tutorial/tutorial-step.tsx deleted file mode 100644 index 0ab9cd4e3..000000000 --- a/examples/playground/components/tutorial/tutorial-step.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import { Checkbox } from "../ui/checkbox"; - -export function TutorialStep({ - title, - children, -}: { - title: string; - children: React.ReactNode; -}) { - return ( -
  • - - -
  • - ); -} diff --git a/examples/playground/components/typography/inline-code.tsx b/examples/playground/components/typography/inline-code.tsx deleted file mode 100644 index 288f9e30e..000000000 --- a/examples/playground/components/typography/inline-code.tsx +++ /dev/null @@ -1,7 +0,0 @@ -export function TypographyInlineCode() { - return ( - - @radix-ui/react-alert-dialog - - ); -} diff --git a/examples/playground/components/ui/badge.tsx b/examples/playground/components/ui/badge.tsx deleted file mode 100644 index d3d5d6040..000000000 --- a/examples/playground/components/ui/badge.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import * as React from "react"; -import { cva, type VariantProps } from "class-variance-authority"; - -import { cn } from "@/lib/utils"; - -const badgeVariants = cva( - "inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", - { - variants: { - variant: { - default: - "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", - secondary: - "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", - destructive: - "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80", - outline: "text-foreground", - }, - }, - defaultVariants: { - variant: "default", - }, - }, -); - -export interface BadgeProps - extends React.HTMLAttributes, - VariantProps {} - -function Badge({ className, variant, ...props }: BadgeProps) { - return ( -
    - ); -} - -export { Badge, badgeVariants }; diff --git a/examples/playground/components/ui/button.tsx b/examples/playground/components/ui/button.tsx deleted file mode 100644 index 57c9fe47e..000000000 --- a/examples/playground/components/ui/button.tsx +++ /dev/null @@ -1,56 +0,0 @@ -import * as React from "react"; -import { Slot } from "@radix-ui/react-slot"; -import { cva, type VariantProps } from "class-variance-authority"; - -import { cn } from "@/lib/utils"; - -const buttonVariants = cva( - "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50", - { - variants: { - variant: { - default: "bg-primary text-primary-foreground hover:bg-primary/90", - destructive: - "bg-destructive text-destructive-foreground hover:bg-destructive/90", - outline: - "border border-input bg-background hover:bg-accent hover:text-accent-foreground", - secondary: - "bg-secondary text-secondary-foreground hover:bg-secondary/80", - ghost: "hover:bg-accent hover:text-accent-foreground", - link: "text-primary underline-offset-4 hover:underline", - }, - size: { - default: "h-10 px-4 py-2", - sm: "h-9 rounded-md px-3", - lg: "h-11 rounded-md px-8", - icon: "h-10 w-10", - }, - }, - defaultVariants: { - variant: "default", - size: "default", - }, - }, -); - -export interface ButtonProps - extends React.ButtonHTMLAttributes, - VariantProps { - asChild?: boolean; -} - -const Button = React.forwardRef( - ({ className, variant, size, asChild = false, ...props }, ref) => { - const Comp = asChild ? Slot : "button"; - return ( - - ); - }, -); -Button.displayName = "Button"; - -export { Button, buttonVariants }; diff --git a/examples/playground/components/ui/checkbox.tsx b/examples/playground/components/ui/checkbox.tsx deleted file mode 100644 index 5985e3c3e..000000000 --- a/examples/playground/components/ui/checkbox.tsx +++ /dev/null @@ -1,30 +0,0 @@ -"use client"; - -import * as React from "react"; -import * as CheckboxPrimitive from "@radix-ui/react-checkbox"; -import { Check } from "lucide-react"; - -import { cn } from "@/lib/utils"; - -const Checkbox = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, ...props }, ref) => ( - - - - - -)); -Checkbox.displayName = CheckboxPrimitive.Root.displayName; - -export { Checkbox }; diff --git a/examples/playground/components/ui/collapsible.tsx b/examples/playground/components/ui/collapsible.tsx deleted file mode 100644 index 9fa48946a..000000000 --- a/examples/playground/components/ui/collapsible.tsx +++ /dev/null @@ -1,11 +0,0 @@ -"use client" - -import * as CollapsiblePrimitive from "@radix-ui/react-collapsible" - -const Collapsible = CollapsiblePrimitive.Root - -const CollapsibleTrigger = CollapsiblePrimitive.CollapsibleTrigger - -const CollapsibleContent = CollapsiblePrimitive.CollapsibleContent - -export { Collapsible, CollapsibleTrigger, CollapsibleContent } diff --git a/examples/playground/components/ui/dropdown-menu.tsx b/examples/playground/components/ui/dropdown-menu.tsx deleted file mode 100644 index 3a0c7fed7..000000000 --- a/examples/playground/components/ui/dropdown-menu.tsx +++ /dev/null @@ -1,200 +0,0 @@ -"use client"; - -import * as React from "react"; -import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"; -import { Check, ChevronRight, Circle } from "lucide-react"; - -import { cn } from 
"@/lib/utils"; - -const DropdownMenu = DropdownMenuPrimitive.Root; - -const DropdownMenuTrigger = DropdownMenuPrimitive.Trigger; - -const DropdownMenuGroup = DropdownMenuPrimitive.Group; - -const DropdownMenuPortal = DropdownMenuPrimitive.Portal; - -const DropdownMenuSub = DropdownMenuPrimitive.Sub; - -const DropdownMenuRadioGroup = DropdownMenuPrimitive.RadioGroup; - -const DropdownMenuSubTrigger = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef & { - inset?: boolean; - } ->(({ className, inset, children, ...props }, ref) => ( - - {children} - - -)); -DropdownMenuSubTrigger.displayName = - DropdownMenuPrimitive.SubTrigger.displayName; - -const DropdownMenuSubContent = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, ...props }, ref) => ( - -)); -DropdownMenuSubContent.displayName = - DropdownMenuPrimitive.SubContent.displayName; - -const DropdownMenuContent = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, sideOffset = 4, ...props }, ref) => ( - - - -)); -DropdownMenuContent.displayName = DropdownMenuPrimitive.Content.displayName; - -const DropdownMenuItem = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef & { - inset?: boolean; - } ->(({ className, inset, ...props }, ref) => ( - -)); -DropdownMenuItem.displayName = DropdownMenuPrimitive.Item.displayName; - -const DropdownMenuCheckboxItem = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, checked, ...props }, ref) => ( - - - - - - - {children} - -)); -DropdownMenuCheckboxItem.displayName = - DropdownMenuPrimitive.CheckboxItem.displayName; - -const DropdownMenuRadioItem = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( - - - - - - - {children} - -)); -DropdownMenuRadioItem.displayName = DropdownMenuPrimitive.RadioItem.displayName; - -const DropdownMenuLabel = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef & { - inset?: boolean; - } ->(({ className, inset, ...props }, ref) => ( - -)); -DropdownMenuLabel.displayName = DropdownMenuPrimitive.Label.displayName; - -const DropdownMenuSeparator = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, ...props }, ref) => ( - -)); -DropdownMenuSeparator.displayName = DropdownMenuPrimitive.Separator.displayName; - -const DropdownMenuShortcut = ({ - className, - ...props -}: React.HTMLAttributes) => { - return ( - - ); -}; -DropdownMenuShortcut.displayName = "DropdownMenuShortcut"; - -export { - DropdownMenu, - DropdownMenuTrigger, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuCheckboxItem, - DropdownMenuRadioItem, - DropdownMenuLabel, - DropdownMenuSeparator, - DropdownMenuShortcut, - DropdownMenuGroup, - DropdownMenuPortal, - DropdownMenuSub, - DropdownMenuSubContent, - DropdownMenuSubTrigger, - DropdownMenuRadioGroup, -}; diff --git a/examples/playground/components/ui/input.tsx b/examples/playground/components/ui/input.tsx deleted file mode 100644 index 2380fc98d..000000000 --- a/examples/playground/components/ui/input.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import * as React from 'react'; - -import { cn } from '@/lib/utils'; - -export type InputProps = React.InputHTMLAttributes; - -const Input = React.forwardRef( - ({ className, type, ...props }, ref) => { - return ( - - ); - }, -); -Input.displayName = 'Input'; - -export { Input }; diff --git a/examples/playground/components/ui/label.tsx 
b/examples/playground/components/ui/label.tsx deleted file mode 100644 index 84f8b0c70..000000000 --- a/examples/playground/components/ui/label.tsx +++ /dev/null @@ -1,26 +0,0 @@ -"use client"; - -import * as React from "react"; -import * as LabelPrimitive from "@radix-ui/react-label"; -import { cva, type VariantProps } from "class-variance-authority"; - -import { cn } from "@/lib/utils"; - -const labelVariants = cva( - "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70", -); - -const Label = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef & - VariantProps ->(({ className, ...props }, ref) => ( - -)); -Label.displayName = LabelPrimitive.Root.displayName; - -export { Label }; diff --git a/examples/playground/components/ui/skeleton.tsx b/examples/playground/components/ui/skeleton.tsx deleted file mode 100644 index bee96db9e..000000000 --- a/examples/playground/components/ui/skeleton.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { cn } from "@/lib/utils" - -function Skeleton({ - className, - ...props -}: React.HTMLAttributes) { - return ( -
    - ) -} - -export { Skeleton } \ No newline at end of file diff --git a/examples/playground/components/website-analysis-ui.tsx b/examples/playground/components/website-analysis-ui.tsx deleted file mode 100644 index 93c412755..000000000 --- a/examples/playground/components/website-analysis-ui.tsx +++ /dev/null @@ -1,236 +0,0 @@ -'use client'; - -import React, { useState, useEffect } from 'react'; -import type { FlowRun } from '@pgflow/client'; -import type { AnyFlow } from '@pgflow/dsl'; -import { Badge } from '@/components/ui/badge'; -import { Button } from '@/components/ui/button'; -import { motion, AnimatePresence } from 'framer-motion'; -import { ChevronDown, ChevronUp } from 'lucide-react'; - -interface WebsiteAnalysisUIProps { - flowRun: FlowRun | null; - loading: boolean; - error: string | null; -} - -export default function WebsiteAnalysisUI({ - flowRun, - loading, - error, -}: WebsiteAnalysisUIProps) { - const [analysisExpanded, setAnalysisExpanded] = useState(true); - const [, setRefresh] = useState(0); - - useEffect(() => { - if (!flowRun) return; - - const unsubscribes: (() => void)[] = []; - - // Subscribe to flow run events - unsubscribes.push( - flowRun.on('*', () => { - setRefresh(prev => prev + 1); - }) - ); - - // Subscribe to step events for the steps we care about - const stepSlugs = ['website', 'summary', 'tags', 'saveToDb']; - for (const stepSlug of stepSlugs) { - const step = flowRun.step(stepSlug); - unsubscribes.push( - step.on('*', () => { - setRefresh(prev => prev + 1); - }) - ); - } - - return () => { - unsubscribes.forEach(unsubscribe => unsubscribe()); - }; - }, [flowRun]); - - // Get the input URL from the run - const analyzedUrl = flowRun?.input?.url || ''; - - // Check if analysis is complete - const isAnalysisComplete = flowRun?.status === 'completed'; - - // Get summary and tags from step outputs - // We'll need to access these through the step() method - const summaryStep = flowRun?.step('summary'); - const tagsStep = flowRun?.step('tags'); - - // The summary step returns a string directly - const summary = typeof summaryStep?.output === 'string' ? summaryStep.output : null; - // The tags step returns an array of strings directly - const tags = Array.isArray(tagsStep?.output) ? tagsStep.output : []; - - - if (loading) { - return
    Loading...
    ; - } - - if (error) { - return
    Error: {String(error)}
    ; - } - - return ( -
    - {/* Header Section */} -
    -

    Website Analyzer

    -

    - Analyze any website using AI to extract key information, generate summaries, and identify relevant tags. -

    -
    - - - {/* Analysis Results */} - {flowRun && ( -
    - - - - - - - {analysisExpanded && ( - - {/* Progress Steps */} -
    - {['website', 'summary', 'tags', 'saveToDb'].map((stepSlug) => { - const step = flowRun.step(stepSlug); - const displayName = stepSlug === 'saveToDb' ? 'Save to Database' : stepSlug; - const stepStatus = step?.status || 'pending'; - return ( -
    - - {displayName.replace(/_/g, ' ')} - - - {stepStatus} - -
    - ); - })} -
    - - {/* Summary Section */} - {isAnalysisComplete && summary && ( -
    -

    Summary

    -

    - {String(summary)} -

    -
    - )} - - {/* Tags Section */} - {isAnalysisComplete && tags.length > 0 && ( -
    -

    Tags

    -
    - {tags.map((tag: string, index: number) => ( - - {String(tag)} - - ))} -
    -
    - )} - - {/* Status Message */} - {!isAnalysisComplete && ( -
    -

    - Analysis in progress... This may take a few moments. -

    -
    - )} -
    - )} -
    -
    -
    -
    - )} -
    - ); -} - -// Helper component for collapsible sections -function Collapsible({ - children, - open, - onOpenChange -}: { - children: React.ReactNode; - open: boolean; - onOpenChange: (open: boolean) => void; -}) { - return ( -
    - {React.Children.map(children, child => { - if (React.isValidElement(child)) { - if (child.type === CollapsibleTrigger) { - return React.cloneElement(child as React.ReactElement<{onClick?: () => void}>, { - onClick: () => onOpenChange(!open) - }); - } - if (child.type === CollapsibleContent) { - return open ? child : null; - } - } - return child; - })} -
    - ); -} - -function CollapsibleTrigger({ - children, - asChild, - onClick -}: { - children: React.ReactNode; - asChild?: boolean; - onClick?: () => void; -}) { - if (asChild && React.isValidElement(children)) { - return React.cloneElement(children as React.ReactElement<{onClick?: () => void}>, { onClick }); - } - return
    {children}
    ; -} - -function CollapsibleContent({ children }: { children: React.ReactNode }) { - return <>{children}; -} \ No newline at end of file diff --git a/examples/playground/components/website-analyzer-form.tsx b/examples/playground/components/website-analyzer-form.tsx deleted file mode 100644 index ffcb4339d..000000000 --- a/examples/playground/components/website-analyzer-form.tsx +++ /dev/null @@ -1,72 +0,0 @@ -'use client'; - -import { useStartAnalysis } from '@/lib/hooks/use-start-analysis'; -import { useEffect, useState } from 'react'; -import { createClient } from '@/utils/supabase/client'; -import { Label } from '@/components/ui/label'; -import { Input } from '@/components/ui/input'; -import { FormMessage } from '@/components/form-message'; -import { SubmitButton } from '@/components/submit-button'; -import { useLoadingState } from './loading-state-provider'; - -export default function WebsiteAnalyzerForm() { - const { start: startAnalysis, isPending, error: formError } = useStartAnalysis(); - const [isLoggedIn, setIsLoggedIn] = useState(null); - const supabase = createClient(); - const { setLoading } = useLoadingState(); - - useEffect(() => { - supabase.auth.getUser().then(({ data }) => { - setIsLoggedIn(!!data.user); - }); - }, []); - - async function handleAnalyzeWebsite(formData: FormData) { - const url = formData.get('url') as string; - - if (!url) { - return; - } - - // Set global loading state to true - setLoading(true); - - // Start analysis will handle auth check and redirect - startAnalysis(url); - } - - return ( -
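For reference, the hand-rolled Collapsible above mirrors the Radix API it replaces. A standalone usage sketch (hypothetical component in the same file, so no extra imports are needed):

  function ExampleSection() {
    const [open, setOpen] = useState(false);
    return (
      <Collapsible open={open} onOpenChange={setOpen}>
        <CollapsibleTrigger asChild>
          <Button variant="outline">Toggle details</Button>
        </CollapsibleTrigger>
        <CollapsibleContent>
          <p>Only rendered while open.</p>
        </CollapsibleContent>
      </Collapsible>
    );
  }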
    -

    Analyze a Website

    -

    - Enter a URL to analyze a website -

    -
    -
    - - -
    - - 🚀 Start Analysis - - {formError && } - - {isLoggedIn === false && ( -
    -

    - You'll need to sign in to analyze websites. When you click the - button, you'll be redirected to the sign-in page. -

    -
    - )} -
    - ); -} diff --git a/examples/playground/eslint.config.cjs b/examples/playground/eslint.config.cjs deleted file mode 100644 index 81d5a8e11..000000000 --- a/examples/playground/eslint.config.cjs +++ /dev/null @@ -1,8 +0,0 @@ -const baseConfig = require('../../eslint.config.cjs'); - -module.exports = [ - ...baseConfig, - { - ignores: ['.next/**', 'supabase/functions/**/*.ts'], - }, -]; diff --git a/examples/playground/lib/db.ts b/examples/playground/lib/db.ts deleted file mode 100644 index 54f6151de..000000000 --- a/examples/playground/lib/db.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Database } from '@/supabase/functions/database-types'; - -// Keep only the database type exports that might be needed elsewhere -export type RunRow = Database['pgflow']['Tables']['runs']['Row']; -export type StepStateRow = Database['pgflow']['Tables']['step_states']['Row']; -export type StepTaskRow = Database['pgflow']['Tables']['step_tasks']['Row']; - -// Note: The observation functions and custom types have been removed -// as they are now handled by the @pgflow/client library \ No newline at end of file diff --git a/examples/playground/lib/example-links.ts b/examples/playground/lib/example-links.ts deleted file mode 100644 index e2f0f622f..000000000 --- a/examples/playground/lib/example-links.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Common example links for website analysis - */ - -export interface ExampleLink { - url: string; - label: string; - variant: 'success' | 'failure'; -} - -export const exampleLinks: ExampleLink[] = [ - { - url: 'https://en.wikipedia.org/wiki/PostgreSQL', - label: 'PostgreSQL Wikipedia', - variant: 'success', - }, - { - url: 'https://supabase.com/docs', - label: 'Supabase Docs', - variant: 'success', - }, - { - url: 'https://pgflow.dev', - label: 'pgflow.dev', - variant: 'success', - }, - // { - // url: 'https://aws.amazon.com/', - // label: '50% failure', - // variant: 'failure', - // }, - { - url: 'https://firebase.google.com/', - label: '100% failure', - variant: 'failure', - }, -]; diff --git a/examples/playground/lib/hooks/use-elapsed-time.ts b/examples/playground/lib/hooks/use-elapsed-time.ts deleted file mode 100644 index 47f2a2eab..000000000 --- a/examples/playground/lib/hooks/use-elapsed-time.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { useEffect, useRef, useCallback } from 'react'; - -// Format relative time in a concise way (e.g., "3s ago", "5m ago") -function formatRelativeTime( - date: Date | null, - now: Date = new Date(), -): string { - if (!date) return ''; - - const then = date; - const diffMs = now.getTime() - then.getTime(); - const diffSec = Math.floor(diffMs / 1000); - - // Handle case where time difference is negative (server/client time mismatch) - if (diffSec < 1) { - return '0s'; - } - - if (diffSec < 60) { - return `${diffSec}s`; - } - - const minutes = Math.floor(diffSec / 60); - - if (minutes < 60) { - return `${minutes}m`; - } - - const hours = Math.floor(minutes / 60); - - if (hours < 24) { - return `${hours}h`; - } - - const days = Math.floor(hours / 24); - return `${days}d`; -} - -/** - * Hook that returns a ref callback to attach to elements that need elapsed time updates. - * This avoids re-rendering the entire component tree when the timer ticks. 
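 *
 * A minimal usage sketch (hypothetical consumer, not part of this file):
 * attach the returned ref callback to whichever element should show the
 * ticking value, e.g.
 *
 *   const startedAtRef = useElapsedTime(run.started_at);
 *   return <span ref={startedAtRef} />;
 *
 * The hook then writes `formatRelativeTime(startDate) + ' ago'` into the
 * element's textContent once per second without re-rendering React.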
- */ -export function useElapsedTime(startDate: Date | null) { - const elementRefs = useRef>(new Set()); - const intervalRef = useRef | undefined>(undefined); - - // Update all registered elements with the current elapsed time - const updateElements = useCallback(() => { - if (!startDate) return; - - const now = new Date(); - const timeString = `${formatRelativeTime(startDate, now)} ago`; - - elementRefs.current.forEach(element => { - if (element.textContent !== timeString) { - element.textContent = timeString; - } - }); - }, [startDate]); - - // Ref callback to register/unregister elements - const refCallback = useCallback((element: HTMLElement | null) => { - if (element) { - elementRefs.current.add(element); - // Update immediately when element is registered - if (startDate) { - const now = new Date(); - element.textContent = `${formatRelativeTime(startDate, now)} ago`; - } - } else { - // Element is being unmounted, remove from set - elementRefs.current.forEach(el => { - if (!document.contains(el)) { - elementRefs.current.delete(el); - } - }); - } - }, [startDate]); - - useEffect(() => { - if (!startDate) return; - - // Update immediately - updateElements(); - - // Set up interval to update every second - intervalRef.current = setInterval(updateElements, 1000); - - return () => { - if (intervalRef.current) { - clearInterval(intervalRef.current); - } - }; - }, [startDate, updateElements]); - - return refCallback; -} \ No newline at end of file diff --git a/examples/playground/lib/hooks/use-start-analysis.ts b/examples/playground/lib/hooks/use-start-analysis.ts deleted file mode 100644 index f40de8369..000000000 --- a/examples/playground/lib/hooks/use-start-analysis.ts +++ /dev/null @@ -1,38 +0,0 @@ -// lib/hooks/use-start-analysis.ts -'use client'; - -import { useState } from 'react'; -import { useRouter } from 'next/navigation'; -import { startWebsiteAnalysis } from '@/lib/services/start-analysis'; -import { usePgflowClient } from '@/lib/pgflow-client-provider'; - -export function useStartAnalysis() { - const router = useRouter(); - const pgflow = usePgflowClient(); - const [error, setError] = useState(null); - const [isPending, setIsPending] = useState(false); - - async function start(url: string) { - setError(null); - setIsPending(true); - - try { - const flowRun = await startWebsiteAnalysis(url, {}, pgflow); - // Navigate immediately - PgflowClient already has the run cached - router.push(`/websites/runs/${flowRun.run_id}`); - } catch (err) { - const error = err as {code?: string; message?: string}; - if (error?.code === 'AUTH_REQUIRED') { - // we want to remember the url and redirect - localStorage.setItem('pendingAnalysisUrl', url); - router.push('/sign-in'); - return; - } - setError(error.message ?? 
'Something went wrong'); - } finally { - setIsPending(false); - } - } - - return { start, error, isPending }; -} \ No newline at end of file diff --git a/examples/playground/lib/pgflow-client-provider.tsx b/examples/playground/lib/pgflow-client-provider.tsx deleted file mode 100644 index 11bf96478..000000000 --- a/examples/playground/lib/pgflow-client-provider.tsx +++ /dev/null @@ -1,31 +0,0 @@ -'use client'; - -import React, { createContext, useContext, useRef, ReactNode } from 'react'; -import { PgflowClient } from '@pgflow/client'; -import { createClient } from '@/utils/supabase/client'; - -const PgflowClientContext = createContext(null); - -export function PgflowClientProvider({ children }: { children: ReactNode }) { - // Use a ref to ensure the client instance is stable across renders - const clientRef = useRef(null); - - if (!clientRef.current) { - const supabase = createClient(); - clientRef.current = new PgflowClient(supabase); - } - - return ( - - {children} - - ); -} - -export function usePgflowClient() { - const client = useContext(PgflowClientContext); - if (!client) { - throw new Error('usePgflowClient must be used within PgflowClientProvider'); - } - return client; -} \ No newline at end of file diff --git a/examples/playground/lib/services/start-analysis.ts b/examples/playground/lib/services/start-analysis.ts deleted file mode 100644 index 1ca9eb2e5..000000000 --- a/examples/playground/lib/services/start-analysis.ts +++ /dev/null @@ -1,54 +0,0 @@ -// lib/services/start-analysis.ts -import { createClient } from '@/utils/supabase/client'; -import type { FlowRun, PgflowClient } from '@pgflow/client'; -import type { AnyFlow } from '@pgflow/dsl'; - -export interface StartAnalysisOptions { - /** - * If true, throws an AuthRequiredError when the user is not logged in. - * If false, function continues but you can handle unauthenticated state yourself. - */ - requireAuth?: boolean; - /** - * Predetermined run id (useful for optimistic UI / testing). - */ - runId?: string; -} - -/** - * Starts analyse-website pgflow run and returns the FlowRun instance. 
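 *
 * A typical call site (mirroring use-start-analysis.ts above):
 *
 *   const run = await startWebsiteAnalysis(url, {}, pgflow);
 *   router.push(`/websites/runs/${run.run_id}`);
 *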
- * **This is the ONLY place that knows HOW we start a flow.** - */ -export async function startWebsiteAnalysis( - url: string, - { requireAuth = true, runId }: StartAnalysisOptions = {}, - pgflow: PgflowClient -): Promise> { - if (!url) throw new Error('URL is required'); - if (!pgflow) throw new Error('PgflowClient is required'); - - const supabase = createClient(); - - // optional auth guard - if (requireAuth) { - const { data } = await supabase.auth.getUser(); - if (!data.user) { - const err = new Error('AUTH_REQUIRED') as Error & {code?: string}; - // tiny custom error class makes catching easier - err.code = 'AUTH_REQUIRED'; - throw err; - } - } - - // Get the user ID for the flow input - const { data: userData } = await supabase.auth.getUser(); - const userId = userData.user?.id; - - const flowRun = await pgflow.startFlow( - 'analyze_website', - { url, user_id: userId }, - runId - ); - - return flowRun; -} \ No newline at end of file diff --git a/examples/playground/lib/utils.ts b/examples/playground/lib/utils.ts deleted file mode 100644 index a5ef19350..000000000 --- a/examples/playground/lib/utils.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { clsx, type ClassValue } from "clsx"; -import { twMerge } from "tailwind-merge"; - -export function cn(...inputs: ClassValue[]) { - return twMerge(clsx(inputs)); -} diff --git a/examples/playground/middleware.ts b/examples/playground/middleware.ts deleted file mode 100644 index 53428f8c9..000000000 --- a/examples/playground/middleware.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { type NextRequest } from "next/server"; -import { updateSession } from "@/utils/supabase/middleware"; - -export async function middleware(request: NextRequest) { - return await updateSession(request); -} - -export const config = { - matcher: [ - /* - * Match all request paths except: - * - _next/static (static files) - * - _next/image (image optimization files) - * - favicon.ico (favicon file) - * - images - .svg, .png, .jpg, .jpeg, .gif, .webp - * Feel free to modify this pattern to include more paths. 
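     * For example, a hypothetical extra exclusion for /api/health would be:
     * "/((?!api/health|_next/static|_next/image|favicon.ico|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)"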
- */ - "/((?!_next/static|_next/image|favicon.ico|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)", - ], -}; diff --git a/examples/playground/next.config.ts b/examples/playground/next.config.ts deleted file mode 100644 index ef7a43c4c..000000000 --- a/examples/playground/next.config.ts +++ /dev/null @@ -1,33 +0,0 @@ -import type { NextConfig } from 'next'; -import { composePlugins, withNx } from '@nx/next'; -import { withPlausibleProxy } from 'next-plausible'; - -const nextConfig: NextConfig = { - experimental: { - externalDir: true, - }, - transpilePackages: ['@pgflow/client', '@pgflow/dsl'], - nx: { - svgr: false, // Disable deprecated SVGR support - }, - webpack: (config, { isServer }) => { - if (!isServer) { - // Provide fallbacks for Node.js modules that ws tries to use - config.resolve.fallback = { - ...config.resolve.fallback, - net: false, - tls: false, - fs: false, - crypto: false, - }; - } - - return config; - }, -}; - -// Compose plugins properly - withNx handles workspace library resolution -export default composePlugins( - withNx, - withPlausibleProxy() -)(nextConfig); \ No newline at end of file diff --git a/examples/playground/package.json b/examples/playground/package.json deleted file mode 100644 index 2163bc55a..000000000 --- a/examples/playground/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "playground", - "private": true, - "scripts": { - "build": "next build", - "start": "next start" - }, - "dependencies": { - "@pgflow/client": "workspace:*", - "@pgflow/dsl": "workspace:*", - "@radix-ui/react-checkbox": "^1.1.1", - "@radix-ui/react-collapsible": "^1.1.8", - "@radix-ui/react-dropdown-menu": "^2.1.1", - "@radix-ui/react-label": "^2.1.0", - "@radix-ui/react-slot": "^1.1.0", - "@supabase/ssr": "latest", - "@supabase/supabase-js": "2.49.4", - "@types/react": "19.1.9", - "@types/react-dom": "19.0.2", - "@types/uuid": "10.0.0", - "autoprefixer": "10.4.20", - "class-variance-authority": "^0.7.0", - "clsx": "^2.1.1", - "framer-motion": "^12.9.2", - "lucide-react": "^0.468.0", - "nanoevents": "7.0.1", - "next": "15.0.3", - "next-plausible": "^3.12.4", - "next-themes": "^0.4.3", - "postcss": "8.4.49", - "postgres": "3.4.5", - "react": "19.0.0", - "react-dom": "19.0.0", - "sharp": "0.32.6", - "supabase": "2.21.1", - "tailwind-merge": "2.6.0", - "tailwindcss": "3.4.17", - "tailwindcss-animate": "1.0.7", - "terser": "5.43.1", - "typescript": "5.8.3", - "uuid": "9.0.1" - } -} \ No newline at end of file diff --git a/examples/playground/postcss.config.js b/examples/playground/postcss.config.js deleted file mode 100644 index 12a703d90..000000000 --- a/examples/playground/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -}; diff --git a/examples/playground/project.json b/examples/playground/project.json deleted file mode 100644 index c76519561..000000000 --- a/examples/playground/project.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "name": "playground", - "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "examples/playground", - "projectType": "application", - "tags": [], - "targets": { - "build": { - "executor": "@nx/next:build", - "outputs": ["{projectRoot}/.next"], - "defaultConfiguration": "production", - "options": { - "outputPath": "{projectRoot}" - }, - "configurations": { - "development": { - "outputPath": "{projectRoot}" - }, - "production": { - "outputPath": "{projectRoot}" - } - } - }, - "bundle": { - "executor": "nx:run-commands", - "dependsOn": ["build"], - "options": { 
- "cwd": "{projectRoot}", - "command": "pnpm netlify build --context=${NETLIFY_CONTEXT:-production}" - }, - "outputs": ["{projectRoot}/.netlify"] - }, - "deploy:preview": { - "executor": "nx:run-commands", - "dependsOn": ["bundle"], - "options": { - "cwd": "{projectRoot}", - "command": "pnpm netlify deploy --dir=.netlify/dist --functions=.netlify/functions --alias=${GITHUB_SHA:-preview}" - } - }, - "deploy:prod": { - "executor": "nx:run-commands", - "dependsOn": ["bundle"], - "options": { - "cwd": "{projectRoot}", - "command": "pnpm netlify deploy --dir=.netlify/dist --functions=.netlify/functions --prod" - } - }, - "sync-edge-deps": { - "executor": "nx:run-commands", - "dependsOn": [ - { - "projects": ["core", "dsl"], - "target": "build" - } - ], - "options": { - "command": "./scripts/sync-edge-deps.sh", - "cwd": "examples/playground" - } - }, - "start-functions": { - "executor": "nx:run-commands", - "dependsOn": ["sync-edge-deps"], - "options": { - "command": "./scripts/supabase functions serve --import-map ./supabase/functions/import_map.local.json", - "cwd": "examples/playground" - } - } - } -} diff --git a/examples/playground/public/.gitkeep b/examples/playground/public/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/examples/playground/public/favicon.ico b/examples/playground/public/favicon.ico deleted file mode 100644 index 718d6fea4..000000000 Binary files a/examples/playground/public/favicon.ico and /dev/null differ diff --git a/examples/playground/public/opengraph-image.png b/examples/playground/public/opengraph-image.png deleted file mode 100644 index 57595e662..000000000 Binary files a/examples/playground/public/opengraph-image.png and /dev/null differ diff --git a/examples/playground/public/twitter-image.png b/examples/playground/public/twitter-image.png deleted file mode 100644 index 57595e662..000000000 Binary files a/examples/playground/public/twitter-image.png and /dev/null differ diff --git a/examples/playground/scripts/run_sql.sh b/examples/playground/scripts/run_sql.sh deleted file mode 100755 index 3dc37492c..000000000 --- a/examples/playground/scripts/run_sql.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# Script to run SQL queries using the local Supabase database -# Usage: -# ./scripts/run_sql.sh "SELECT * FROM table;" -# ./scripts/run_sql.sh -c "SELECT * FROM table;" -# ./scripts/run_sql.sh -f file.sql -# echo "SELECT 1;" | ./scripts/run_sql.sh - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -# Get database URL from supabase status -DB_URL=$("$SCRIPT_DIR/supabase" status --output json 2>/dev/null | jq -r '.DB_URL') - -if [ -z "$DB_URL" ] || [ "$DB_URL" = "null" ]; then - echo "Error: Could not get database URL from supabase status" >&2 - echo "Make sure supabase is running: npm run supabase:start" >&2 - exit 1 -fi - -# If no arguments provided but stdin is available, use stdin -if [ $# -eq 0 ] && [ ! -t 0 ]; then - psql "$DB_URL" -# If a single argument without -c or -f flag, assume it's a SQL command -elif [ $# -eq 1 ] && [[ ! 
"$1" =~ ^- ]]; then - psql "$DB_URL" -c "$1" -else - # Otherwise pass all arguments through to psql - psql "$DB_URL" "$@" -fi \ No newline at end of file diff --git a/examples/playground/scripts/supabase b/examples/playground/scripts/supabase deleted file mode 100755 index d4525b2a4..000000000 --- a/examples/playground/scripts/supabase +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -# Source the environment variables -source .env - -# Export required variables for Supabase -export GITHUB_OAUTH_CLIENT_ID -export GITHUB_OAUTH_CLIENT_SECRET - -# Execute supabase with all arguments passed through -pnpm exec supabase "$@" \ No newline at end of file diff --git a/examples/playground/scripts/sync-edge-deps.sh b/examples/playground/scripts/sync-edge-deps.sh deleted file mode 100755 index d8c934b75..000000000 --- a/examples/playground/scripts/sync-edge-deps.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PLAYGROUND_DIR="$(dirname "$SCRIPT_DIR")" -MONOREPO_ROOT="$(cd "$PLAYGROUND_DIR/../.." && pwd)" -VENDOR_DIR="$PLAYGROUND_DIR/supabase/functions/_vendor" - -echo "🔄 Syncing edge function dependencies for local development..." - -# Clean and create vendor directory -rm -rf "$VENDOR_DIR" -mkdir -p "$VENDOR_DIR/@pgflow" - -# Verify builds succeeded -if [ ! -d "$MONOREPO_ROOT/pkgs/core/dist" ]; then - echo "❌ Error: core package build failed - dist directory not found" - exit 1 -fi - -if [ ! -d "$MONOREPO_ROOT/pkgs/dsl/dist" ]; then - echo "❌ Error: dsl package build failed - dist directory not found" - exit 1 -fi - -# Copy core package -echo "📋 Copying @pgflow/core..." -mkdir -p "$VENDOR_DIR/@pgflow/core" -cp -r "$MONOREPO_ROOT/pkgs/core/dist/"* "$VENDOR_DIR/@pgflow/core/" -cp "$MONOREPO_ROOT/pkgs/core/package.json" "$VENDOR_DIR/@pgflow/core/" - -# Copy dsl package -echo "📋 Copying @pgflow/dsl..." -mkdir -p "$VENDOR_DIR/@pgflow/dsl" -cp -r "$MONOREPO_ROOT/pkgs/dsl/dist/"* "$VENDOR_DIR/@pgflow/dsl/" -cp "$MONOREPO_ROOT/pkgs/dsl/package.json" "$VENDOR_DIR/@pgflow/dsl/" - -# Copy edge-worker source (not built) - preserving directory structure -echo "📋 Copying @pgflow/edge-worker..." -mkdir -p "$VENDOR_DIR/@pgflow/edge-worker" -# Copy the entire src directory to maintain relative imports -cp -r "$MONOREPO_ROOT/pkgs/edge-worker/src" "$VENDOR_DIR/@pgflow/edge-worker/" - -# Simple fix: replace .js with .ts in imports -find "$VENDOR_DIR/@pgflow/edge-worker" -name "*.ts" -type f -exec sed -i 's/\.js"/\.ts"/g' {} + -find "$VENDOR_DIR/@pgflow/edge-worker" -name "*.ts" -type f -exec sed -i "s/\.js'/\.ts'/g" {} + - -# Create a redirect index.ts at the root that points to src/index.ts -cat > "$VENDOR_DIR/@pgflow/edge-worker/index.ts" << 'EOF' -// Re-export from the src directory to maintain compatibility -export * from './src/index.ts'; -EOF - -# Create _internal.ts redirect as well since edge-worker exports this path -cat > "$VENDOR_DIR/@pgflow/edge-worker/_internal.ts" << 'EOF' -// Re-export from the src directory to maintain compatibility -export * from './src/_internal.ts'; -EOF - -# Verify key files exist -if [ ! -f "$VENDOR_DIR/@pgflow/core/index.js" ]; then - echo "⚠️ Warning: @pgflow/core/index.js not found after copy" -fi - -if [ ! -f "$VENDOR_DIR/@pgflow/dsl/index.js" ]; then - echo "⚠️ Warning: @pgflow/dsl/index.js not found after copy" -fi - -if [ ! 
-f "$VENDOR_DIR/@pgflow/edge-worker/src/index.ts" ]; then - echo "⚠️ Warning: @pgflow/edge-worker/src/index.ts not found after copy" -fi - -echo "✅ Dependencies synced to $VENDOR_DIR" \ No newline at end of file diff --git a/examples/playground/supabase/.gitignore b/examples/playground/supabase/.gitignore deleted file mode 100644 index ad9264f0b..000000000 --- a/examples/playground/supabase/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# Supabase -.branches -.temp - -# dotenvx -.env.keys -.env.local -.env.*.local diff --git a/examples/playground/supabase/config.toml b/examples/playground/supabase/config.toml deleted file mode 100644 index e7de7b3c9..000000000 --- a/examples/playground/supabase/config.toml +++ /dev/null @@ -1,342 +0,0 @@ -# For detailed configuration reference documentation, visit: -# https://supabase.com/docs/guides/local-development/cli/config -# A string used to distinguish different Supabase projects on the same host. Defaults to the -# working directory name when running `supabase init`. -project_id = "pgflow-demo" - -[api] -enabled = true -# Port to use for the API URL. -port = 54321 -# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API -# endpoints. `public` and `graphql_public` schemas are included by default. -schemas = ["public", "graphql_public", "pgflow"] -# Extra schemas to add to the search_path of every request. -extra_search_path = ["public", "extensions", "pgflow"] -# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size -# for accidental or malicious requests. -max_rows = 1000 - -[api.tls] -# Enable HTTPS endpoints locally using a self-signed certificate. -enabled = false - -[db] -# Port to use for the local database URL. -port = 54322 -# Port used by db diff command to initialize the shadow database. -shadow_port = 54320 -# The database major version to use. This has to be the same as your remote database's. Run `SHOW -# server_version;` on the remote database to check. -major_version = 15 - -[db.pooler] -enabled = true -# Port to use for the local connection pooler. -port = 54329 -# Specifies when a server connection can be reused by other clients. -# Configure one of the supported pooler modes: `transaction`, `session`. -pool_mode = "transaction" -# How many server connections to allow per user/database pair. -default_pool_size = 20 -# Maximum number of client connections allowed. -max_client_conn = 100 - -# [db.vault] -# secret_key = "env(SECRET_VALUE)" - -[db.migrations] -# Specifies an ordered list of schema files that describe your database. -# Supports glob patterns relative to supabase directory: "./schemas/*.sql" -schema_paths = [] - -[db.seed] -# If enabled, seeds the database after migrations during a db reset. -enabled = true -# Specifies an ordered list of seed files to load during db reset. -# Supports glob patterns relative to supabase directory: "./seeds/*.sql" -sql_paths = ["./seed.sql"] - -[realtime] -enabled = true -# Bind realtime via either IPv4 or IPv6. (default: IPv4) -# ip_version = "IPv6" -# The maximum length in bytes of HTTP request headers. (default: 4096) -# max_header_length = 4096 - -[studio] -enabled = true -# Port to use for Supabase Studio. -port = 54323 -# External URL of the API server that frontend connects to. -api_url = "http://127.0.0.1" -# OpenAI API Key to use for Supabase AI in the Supabase Studio. -openai_api_key = "env(OPENAI_API_KEY)" - -# Email testing server. 
Emails sent with the local dev setup are not actually sent - rather, they -# are monitored, and you can view the emails that would have been sent from the web interface. -[inbucket] -enabled = true -# Port to use for the email testing server web interface. -port = 54324 -# Uncomment to expose additional ports for testing user applications that send emails. -# smtp_port = 54325 -# pop3_port = 54326 -# admin_email = "admin@email.com" -# sender_name = "Admin" - -[storage] -enabled = true -# The maximum file size allowed (e.g. "5MB", "500KB"). -file_size_limit = "50MiB" - -# Image transformation API is available to Supabase Pro plan. -# [storage.image_transformation] -# enabled = true - -# Uncomment to configure local storage buckets -# [storage.buckets.images] -# public = false -# file_size_limit = "50MiB" -# allowed_mime_types = ["image/png", "image/jpeg"] -# objects_path = "./images" - -[auth] -enabled = true -# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used -# in emails. -site_url = "http://localhost:3000" -# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. -additional_redirect_urls = ["http://127.0.0.1:3000", "https://127.0.0.1:3000", "http://localhost:3000", "https://localhost:3000"] -# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). -jwt_expiry = 3600 -# If disabled, the refresh token will never expire. -enable_refresh_token_rotation = true -# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. -# Requires enable_refresh_token_rotation = true. -refresh_token_reuse_interval = 10 -# Allow/disallow new user signups to your project. -enable_signup = true -# Allow/disallow anonymous sign-ins to your project. -enable_anonymous_sign_ins = false -# Allow/disallow testing manual linking of accounts -enable_manual_linking = false -# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more. -minimum_password_length = 6 -# Passwords that do not meet the following requirements will be rejected as weak. Supported values -# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols` -password_requirements = "" - -[auth.rate_limit] -# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled. -email_sent = 2 -# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled. -sms_sent = 30 -# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true. -anonymous_users = 30 -# Number of sessions that can be refreshed in a 5 minute interval per IP address. -token_refresh = 150 -# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users). -sign_in_sign_ups = 30 -# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address. -token_verifications = 30 - -# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`. -# [auth.captcha] -# enabled = true -# provider = "hcaptcha" -# secret = "" - -[auth.email] -# Allow/disallow new user signups via email to your project. -enable_signup = true -# If enabled, a user will be required to confirm any email change on both the old, and new email -# addresses. If disabled, only the new email is required to confirm. -double_confirm_changes = true -# If enabled, users need to confirm their email address before signing in. 
-enable_confirmations = false
-# If enabled, users will need to reauthenticate or have logged in recently to change their password.
-secure_password_change = false
-# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
-max_frequency = "1s"
-# Number of characters used in the email OTP.
-otp_length = 6
-# Number of seconds before the email OTP expires (defaults to 1 hour).
-otp_expiry = 3600
-
-# Use a production-ready SMTP server
-# [auth.email.smtp]
-# enabled = true
-# host = "smtp.sendgrid.net"
-# port = 587
-# user = "apikey"
-# pass = "env(SENDGRID_API_KEY)"
-# admin_email = "admin@email.com"
-# sender_name = "Admin"
-
-# Uncomment to customize email template
-# [auth.email.template.invite]
-# subject = "You have been invited"
-# content_path = "./supabase/templates/invite.html"
-
-[auth.sms]
-# Allow/disallow new user signups via SMS to your project.
-enable_signup = false
-# If enabled, users need to confirm their phone number before signing in.
-enable_confirmations = false
-# Template for sending OTP to users
-template = "Your code is {{ .Code }}"
-# Controls the minimum amount of time that must pass before sending another sms otp.
-max_frequency = "5s"
-
-# Use pre-defined map of phone number to OTP for testing.
-# [auth.sms.test_otp]
-# 4152127777 = "123456"
-
-# Configure logged in session timeouts.
-# [auth.sessions]
-# Force log out after the specified duration.
-# timebox = "24h"
-# Force log out if the user has been inactive longer than the specified duration.
-# inactivity_timeout = "8h"
-
-# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
-# [auth.hook.custom_access_token]
-# enabled = true
-# uri = "pg-functions://<database>/<schema>/<hook_name>"
-
-# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
-[auth.sms.twilio]
-enabled = false
-account_sid = ""
-message_service_sid = ""
-# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
-auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
-
-# Multi-factor-authentication is available to Supabase Pro plan.
-[auth.mfa]
-# Control how many MFA factors can be enrolled at once per user.
-max_enrolled_factors = 10
-
-# Control MFA via App Authenticator (TOTP)
-[auth.mfa.totp]
-enroll_enabled = false
-verify_enabled = false
-
-# Configure MFA via Phone Messaging
-[auth.mfa.phone]
-enroll_enabled = false
-verify_enabled = false
-otp_length = 6
-template = "Your code is {{ .Code }}"
-max_frequency = "5s"
-
-# Configure MFA via WebAuthn
-# [auth.mfa.web_authn]
-# enroll_enabled = true
-# verify_enabled = true
-
-[auth.external.github]
-enabled = true
-client_id = "env(GITHUB_OAUTH_CLIENT_ID)"
-secret = "env(GITHUB_OAUTH_CLIENT_SECRET)"
-# Using the default callback URL for Supabase GitHub OAuth
-redirect_uri = "http://localhost:54321/auth/v1/callback"
-
-# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
-# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
-# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
-[auth.external.apple]
-enabled = false
-client_id = ""
-# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
-secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
-# Overrides the default auth redirectUrl.
-redirect_uri = ""
-# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
-# or any other third-party OIDC providers.
-url = ""
-# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
-skip_nonce_check = false
-
-# Use Firebase Auth as a third-party provider alongside Supabase Auth.
-[auth.third_party.firebase]
-enabled = false
-# project_id = "my-firebase-project"
-
-# Use Auth0 as a third-party provider alongside Supabase Auth.
-[auth.third_party.auth0]
-enabled = false
-# tenant = "my-auth0-tenant"
-# tenant_region = "us"
-
-# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
-[auth.third_party.aws_cognito]
-enabled = false
-# user_pool_id = "my-user-pool-id"
-# user_pool_region = "us-east-1"
-
-# Use Clerk as a third-party provider alongside Supabase Auth.
-[auth.third_party.clerk]
-enabled = false
-# Obtain from https://clerk.com/setup/supabase
-# domain = "example.clerk.accounts.dev"
-
-[edge_runtime]
-enabled = true
-# Configure one of the supported request policies: `oneshot`, `per_worker`.
-# Use `oneshot` for hot reload, or `per_worker` for load testing.
-policy = "per_worker"
-# Port to attach the Chrome inspector for debugging edge functions.
-inspector_port = 8083
-# The Deno major version to use.
-deno_version = 1
-
-# [edge_runtime.secrets]
-# secret_key = "env(SECRET_VALUE)"
-
-[analytics]
-enabled = true
-port = 54327
-# Configure one of the supported backends: `postgres`, `bigquery`.
-backend = "postgres"
-
-# Experimental features may be deprecated any time
-[experimental]
-# Configures Postgres storage engine to use OrioleDB (S3)
-orioledb_version = ""
-# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
-s3_host = "env(S3_HOST)"
-# Configures S3 bucket region, eg. us-east-1
-s3_region = "env(S3_REGION)"
-# Configures AWS_ACCESS_KEY_ID for S3 bucket
-s3_access_key = "env(S3_ACCESS_KEY)"
-# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
-s3_secret_key = "env(S3_SECRET_KEY)"
-
-[functions.analyze_website_worker_0]
-enabled = true
-verify_jwt = false
-import_map = "./functions/deno.json"
-entrypoint = "./functions/analyze_website_worker_0/index.ts"
-[functions.analyze_website_worker_1]
-enabled = true
-verify_jwt = false
-import_map = "./functions/deno.json"
-entrypoint = "./functions/analyze_website_worker_1/index.ts"
-[functions.analyze_website_worker_2]
-enabled = true
-verify_jwt = false
-import_map = "./functions/deno.json"
-entrypoint = "./functions/analyze_website_worker_2/index.ts"
-[functions.analyze_website_worker_3]
-enabled = true
-verify_jwt = false
-import_map = "./functions/deno.json"
-entrypoint = "./functions/analyze_website_worker_3/index.ts"
-
-[functions.pgflow-cron-worker]
-enabled = true
-verify_jwt = false
-import_map = "./functions/deno.json"
-entrypoint = "./functions/pgflow-cron-worker/index.ts"
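Worth noting while the config is in view: `schemas = ["public", "graphql_public", "pgflow"]` exposes pgflow's SQL API over PostgREST. The playground drove runs through the `public.start_analyze_website_flow(url)` wrapper (visible in `database-types.d.ts` further down), but with suitable grants a client could call the schema directly. A hedged sketch, assuming a standard supabase-js v2 client and placeholder `SUPABASE_URL` / `SUPABASE_ANON_KEY` values; this is illustrative only and was not part of the deleted playground code:

```typescript
import { createClient } from '@supabase/supabase-js';

const SUPABASE_URL = 'http://127.0.0.1:54321'; // placeholder
const SUPABASE_ANON_KEY = 'your anon key';     // placeholder
const supabase = createClient(SUPABASE_URL, SUPABASE_ANON_KEY);

// "pgflow" is listed in config.toml's api.schemas, so its SQL functions
// are callable via PostgREST (subject to grants / RLS on that schema).
const { data: run, error } = await supabase
  .schema('pgflow')
  .rpc('start_flow', {
    flow_slug: 'analyze_website',
    input: { url: 'https://example.com', user_id: 'user-123' },
  });
if (error) throw error;
console.log('started run:', run);
```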
diff --git a/examples/playground/supabase/functions/.env.example b/examples/playground/supabase/functions/.env.example
deleted file mode 100644
index 690038350..000000000
--- a/examples/playground/supabase/functions/.env.example
+++ /dev/null
@@ -1 +0,0 @@
-GROQ_API_KEY=""
diff --git a/examples/playground/supabase/functions/LOCAL_DEV_README.md b/examples/playground/supabase/functions/LOCAL_DEV_README.md
deleted file mode 100644
index 0142d5545..000000000
--- a/examples/playground/supabase/functions/LOCAL_DEV_README.md
+++ /dev/null
@@ -1,195 +0,0 @@
-# Edge Functions Local Development Guide
-
-## Quick Start
-
-To use local pgflow packages during development:
-
-```bash
-# Terminal 1: Start Supabase
-pnpm 
start-supabase - -# Terminal 2: Start Edge Functions (automatically syncs local packages) -pnpm start-functions -``` - -Your edge functions will now use local versions of `@pgflow/core`, `@pgflow/dsl`, and `@pgflow/edge-worker`. - -**Note:** When you make changes to any pgflow packages, restart the edge functions (`Ctrl+C` and run `pnpm start-functions` again) to sync the latest changes. - -## Why This Setup? - -The pgflow monorepo has a unique challenge: - -1. **@pgflow/edge-worker** is published to JSR (JavaScript Registry) as TypeScript source -2. **@pgflow/core** and **@pgflow/dsl** are published to npm as compiled JavaScript -3. Edge Functions run in Docker containers that can't access files outside `supabase/functions/` - -During development, we need to use unpublished local versions of these packages, which requires copying them into the Edge Functions directory. - -## How It Works - -### Import Mapping Strategy - -We use different import configurations for production and local development: - -1. **Production** (`deno.json`): - - Contains imports pointing to published packages on npm/jsr - - This is what gets deployed to Supabase - -2. **Local Development** (`import_map.local.json`): - - Contains imports pointing to the local vendor directory - - Used via the `--import-map` flag which overrides `deno.json` - -### The --import-map Flag - -When you run `pnpm start-functions`, it executes: -```bash -supabase functions serve --import-map ./supabase/functions/import_map.local.json -``` - -The `--import-map` flag **completely overrides** any imports defined in `deno.json`. This allows us to: -- Keep production configuration as the default (safe for deployments) -- Use local packages only when explicitly running with the flag -- Avoid any git status pollution from modifying tracked files - -### Vendor Directory - -The sync script copies packages into `_vendor/`: - -``` -_vendor/ -├── @pgflow/ -│ ├── core/ # Compiled JavaScript from pkgs/core/dist -│ ├── dsl/ # Compiled JavaScript from pkgs/dsl/dist -│ └── edge-worker/ # TypeScript source from pkgs/edge-worker/src -``` - -### Import Extension Fix - -Edge-worker source files use `.js` extensions for JSR compatibility: -```typescript -// Original in edge-worker -import { foo } from './bar.js'; -``` - -The sync script automatically converts these to `.ts` for local development: -```typescript -// After sync -import { foo } from './bar.ts'; -``` - -## Manual Operations - -### Sync Dependencies Once - -```bash -pnpm nx sync-edge-deps playground -``` - -### Test with Production Packages - -To test with published packages: - -```bash -pnpm nx start-functions:prod playground -``` - -This will use the imports defined in `deno.json` (production packages) without any local overrides. - -## Caveats and Limitations - -### 1. Import Extensions -- Edge-worker uses `.js` extensions in imports for JSR -- These are automatically converted to `.ts` during sync -- This means the vendor files differ from source files - -### 2. Build Requirements -- You must build core and dsl packages before syncing -- The sync script does this automatically -- Build failures will prevent syncing - -### 3. Type Definitions -- TypeScript may have issues with complex type imports -- Some type-only imports might need adjustment -- The compiled packages include `.d.ts` files - -### 4. 
Subpath Imports
-- Imports like `@pgflow/dsl/supabase` need explicit mapping
-- Add these to `import_map.local.json` as needed:
-  ```json
-  "@pgflow/dsl/supabase": "./_vendor/@pgflow/dsl/supabase.js"
-  ```
-
-### 5. Docker Isolation
-- All dependencies must be inside `supabase/functions/`
-- Symlinks don't work reliably with Docker
-- File changes might require function restart
-
-### 6. Version Mismatch
-- Local packages might differ from published versions
-- Test with production config before deploying
-- Keep `deno.json` updated with latest versions
-
-## Troubleshooting
-
-### Module Not Found Errors
-
-If you see "Module not found" errors:
-
-1. Check if the import needs a subpath mapping
-2. Verify the file exists in vendor directory
-3. Ensure import extensions are correct (.js for compiled, .ts for source)
-
-### Type Errors
-
-TypeScript errors in your flow definitions are unrelated to the vendor setup. Fix these in your flow files.
-
-### Changes Not Reflected
-
-If your changes aren't showing up:
-
-1. Run the sync script again to update dependencies
-2. Check that builds succeed
-3. Restart edge functions if needed
-4. Clear Deno cache: `deno cache --reload`
-
-### Permission Errors
-
-Make the sync script executable:
-```bash
-chmod +x examples/playground/scripts/sync-edge-deps.sh
-```
-
-## Best Practices
-
-1. **Run the sync script** after making changes to pgflow packages
-2. **Test with production config** before deploying
-3. **Commit import_map.local.json** - it's part of the development setup
-4. **Don't commit _vendor/** - it's generated and gitignored
-5. **Keep both configs updated** - maintain both local and production configurations
-
-## Directory Structure Reference
-
-```
-supabase/functions/
-├── _vendor/                  # Generated - local packages (gitignored)
-├── import_map.local.json     # Import mappings for local development
-├── deno.json                 # Production imports (npm/jsr packages)
-├── pgflow-cron-worker/       # Example edge function
-└── analyze_website_worker_*/ # Worker functions
-```
-
-## How Import Resolution Works
-
-1. **Production Deployment**:
-   - Uses `deno.json` directly
-   - Imports resolve to npm/jsr registries
-   - No vendor directory needed
-
-2. **Local Development**:
-   - `pnpm start-functions` runs with `--import-map import_map.local.json`
-   - This flag **completely overrides** imports in `deno.json`
-   - Imports resolve to `_vendor/` directory
-   - Git status stays clean (no tracked files modified)
-
-This setup provides a seamless local development experience while maintaining compatibility with production deployments.
\ No newline at end of file
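The contents of `import_map.local.json` are not shown in this hunk, but from the vendor layout and the redirect files `sync-edge-deps.sh` creates (`index.ts`, `_internal.ts`, plus the compiled `index.js` entrypoints it verifies), the local map plausibly looked something like the following sketch; the exact file may have differed:

```json
{
  "imports": {
    "@pgflow/edge-worker": "./_vendor/@pgflow/edge-worker/index.ts",
    "@pgflow/edge-worker/_internal": "./_vendor/@pgflow/edge-worker/_internal.ts",
    "@pgflow/core": "./_vendor/@pgflow/core/index.js",
    "@pgflow/dsl": "./_vendor/@pgflow/dsl/index.js",
    "@pgflow/dsl/supabase": "./_vendor/@pgflow/dsl/supabase.js"
  }
}
```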
diff --git a/examples/playground/supabase/functions/README.md b/examples/playground/supabase/functions/README.md
deleted file mode 100644
index def7ff2b7..000000000
--- a/examples/playground/supabase/functions/README.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# pgflow Functions Directory Structure
-
-This directory contains pgflow functions organized according to best practices for maintainability, reusability, and clarity.
-
-## Key Components
-
-### `_flows/` Directory
-
-Contains flow definitions that compose tasks into directed acyclic graphs (DAGs):
-
-- **analyze_website.ts** - Orchestrates website analysis by coordinating scraping, summarization, tagging, and saving tasks
-
-Flows define:
-
-- Execution order
-- Parallelism opportunities
-- Data dependencies between tasks
-- Error handling and retry logic
-
-### `_tasks/` Directory
-
-Contains small, focused functions that each perform a single unit of work:
-
-- **scrapeWebsite.ts** - Fetches content from a given URL
-- **convertToCleanMarkdown.ts** - Converts HTML to clean Markdown format
-- **summarizeWithAI.ts** - Uses AI to generate content summaries
-- **extractTags.ts** - Extracts relevant tags from content using AI
-- **saveWebsite.ts** - Persists website data to the database
-
-Tasks are:
-
-- Modular and reusable across different flows
-- Testable in isolation
-- Designed with clear inputs and outputs
-- JSON-serializable (required by pgflow)
-
-### Edge Function Workers
-
-Each flow has a corresponding edge function worker that executes the flow logic. By convention, workers are numbered (e.g., `analyze_website_worker_0`, `analyze_website_worker_1`) to enable multiple concurrent workers for the same flow.
-
-### Supporting Files
-
-- **utils.ts** - Shared utilities for database connections and common operations
-- **database-types.d.ts** - TypeScript type definitions generated from the database schema
-- **deno.json** - Configuration for Deno runtime in Edge Functions
-- **deno.lock** - Lock file ensuring consistent dependency versions
-
-## Best Practices
-
-1. **Task Design**: Keep tasks focused on a single responsibility
-2. **Flow Organization**: Use descriptive names and group related logic
-3. **Type Safety**: Leverage TypeScript for flow inputs/outputs
-4. **Error Handling**: Configure appropriate retries and timeouts
-5. **JSON Serialization**: Ensure all data is JSON-serializable
-
-For more details on organizing pgflow code, see the documentation at:
-https://pgflow.io/how-to/organize-flows-code/
diff --git a/examples/playground/supabase/functions/_flows/analyze_website.ts b/examples/playground/supabase/functions/_flows/analyze_website.ts
deleted file mode 100644
index 8b68c8e04..000000000
--- a/examples/playground/supabase/functions/_flows/analyze_website.ts
+++ /dev/null
@@ -1,46 +0,0 @@
-import { Flow } from '@pgflow/dsl/supabase';
-import scrapeWebsite from '../_tasks/scrapeWebsite.ts';
-import summarizeWithAI from '../_tasks/summarizeWithAI.ts';
-import extractTags from '../_tasks/extractTags.ts';
-import saveWebsite from '../_tasks/saveWebsite.ts';
-import { simulateFailure } from '../utils.ts';
-
-type Input = {
-  url: string;
-  user_id: string;
-};
-
-export default new Flow<Input>({
-  slug: 'analyze_website',
-  maxAttempts: 3,
-  timeout: 4,
-  baseDelay: 1,
-})
-  .step(
-    { slug: 'website' },
-    async (input) => await scrapeWebsite(input.run.url)
-  )
-  .step(
-    { slug: 'summary', dependsOn: ['website'] },
-    async (input) => await summarizeWithAI(input.website.content)
-  )
-  .step({ slug: 'tags', dependsOn: ['website'] }, async (input) => {
-    await simulateFailure(input.run.url);
-
-    const { keywords } = await extractTags(input.website.content);
-    return keywords;
-  })
-  .step(
-    { slug: 'saveToDb', dependsOn: ['summary', 'tags'] },
-    async (input, { supabase }) => {
-      const websiteData = {
-        user_id: input.run.user_id,
-        website_url: input.run.url,
-        summary: input.summary,
-        tags: input.tags,
-      };
-      const { website } = await saveWebsite(websiteData, supabase);
-
-      return website;
-    }
-  );
diff --git a/examples/playground/supabase/functions/_tasks/convertToCleanMarkdown.ts b/examples/playground/supabase/functions/_tasks/convertToCleanMarkdown.ts
deleted file mode 100644
index 797363edc..000000000
--- a/examples/playground/supabase/functions/_tasks/convertToCleanMarkdown.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import sanitizeHtml from 'npm:sanitize-html';
-import TurndownService from 'npm:turndown';
-
-export default async function convertToCleanMarkdown(rawHtml: string) {
-  const cleanHtml = sanitizeHtml(rawHtml, {
-    allowedTags: ['h1', 'h2', 'h3', 'p', 'a', 'ul', 'ol', 'li', 'code', 'pre'],
-    allowedAttributes: {
-      a: ['href', 'title'],
-      code: ['class'],
-    },
-    allowedIframeHostnames: ['youtube.com'],
-  });
-  const turndown = new TurndownService();
-  return turndown.turndown(cleanHtml);
-}
-
-// Usage (the task takes raw HTML, not a URL)
-const markdown = await convertToCleanMarkdown('<h1>Hello</h1><p>Example content.</p>');
-console.log(markdown);
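The README above stresses that tasks are testable in isolation; a hedged sketch of what that could look like for the task above (hypothetical test file, not part of the deleted tree):

```typescript
// Hypothetical _tasks/convertToCleanMarkdown.test.ts - tasks are plain async
// functions, so they can be exercised without pgflow or a database.
import convertToCleanMarkdown from './convertToCleanMarkdown.ts';

Deno.test('drops disallowed tags and converts to Markdown', async () => {
  const md = await convertToCleanMarkdown(
    '<h1>Title</h1><script>alert(1)</script><p>Body</p>'
  );
  // sanitize-html strips the <script>; turndown keeps the heading text
  if (!md.includes('Title') || md.includes('alert')) {
    throw new Error(`unexpected markdown: ${md}`);
  }
});
```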
diff --git a/examples/playground/supabase/functions/_tasks/extractTags.ts b/examples/playground/supabase/functions/_tasks/extractTags.ts
deleted file mode 100644
index c8aa1de3e..000000000
--- a/examples/playground/supabase/functions/_tasks/extractTags.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import Groq from 'groq-sdk';
-
-let _groq: Groq | undefined;
-
-function getGroq() {
-  if (!_groq) {
-    _groq = new Groq({
-      apiKey: Deno.env.get('GROQ_API_KEY'),
-    });
-  }
-
-  return _groq;
-}
-
-export default async (content: string) => {
-  const chatCompletion = await getGroq().chat.completions.create({
-    messages: [
-      {
-        role: 'system',
-        content: 'You extract relevant keywords and tags from website content.',
-      },
-      {
-        role: 'user',
-        content: `Extract the most important keywords and tags from the following website content.
-Return a JSON object with a single "keywords" field containing an array of strings.
-Focus on the most relevant and descriptive terms that represent the main topics and themes.
-Limit to 5-10 keywords maximum.
-
-Website content:
-${content}`,
-      },
-    ],
-    model: 'meta-llama/llama-4-scout-17b-16e-instruct',
-    response_format: {
-      type: 'json_object',
-    },
-  });
-
-  // Parse the JSON response directly
-  const responseJson = JSON.parse(
-    chatCompletion.choices[0].message.content || '{}',
-  );
-  let keywords: string[] = []; // Default to empty array
-
-  if (Array.isArray(responseJson.keywords)) {
-    keywords = responseJson.keywords;
-  }
-
-  return {
-    keywords: keywords,
-  };
-};
diff --git a/examples/playground/supabase/functions/_tasks/saveWebsite.ts b/examples/playground/supabase/functions/_tasks/saveWebsite.ts
deleted file mode 100644
index 1b1134fc2..000000000
--- a/examples/playground/supabase/functions/_tasks/saveWebsite.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import type { SupabaseClient } from '@supabase/supabase-js';
-import type { Database } from '../database-types.d.ts';
-
-interface WebsiteData {
-  user_id: string;
-  website_url: string;
-  summary: string;
-  tags: string[];
-}
-
-export default async (
-  websiteData: WebsiteData,
-  supabase: SupabaseClient<Database>,
-) => {
-  const { data } = await supabase
-    .schema('public')
-    .from('websites')
-    .insert([websiteData])
-    .select('*')
-    .single()
-    .throwOnError();
-  console.log('results', data);
-
-  return { success: true, website: data };
-};
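saveWebsite leans on `.throwOnError()`, so a failed insert surfaces as a thrown exception; that fails the task attempt, and pgflow retries it up to the flow's `maxAttempts: 3`. An equivalent explicit form, shown only for contrast (a sketch, not the deleted code):

```typescript
// Same insert without throwOnError(): check the error and rethrow, so the
// failed attempt is still recorded and retried by pgflow.
const { data, error } = await supabase
  .schema('public')
  .from('websites')
  .insert([websiteData])
  .select('*')
  .single();
if (error) throw error;
return { success: true, website: data };
```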
diff --git a/examples/playground/supabase/functions/_tasks/scrapeWebsite.ts b/examples/playground/supabase/functions/_tasks/scrapeWebsite.ts
deleted file mode 100644
index 8e191047d..000000000
--- a/examples/playground/supabase/functions/_tasks/scrapeWebsite.ts
+++ /dev/null
@@ -1,57 +0,0 @@
-export default async function scrapeWebsite(url: string) {
-  const response = await fetch(url);
-
-  if (!response.ok) {
-    throw new Error(`Failed to fetch website: ${response.status}`);
-  }
-
-  const rawContent = await response.text();
-
-  // Extract text content from HTML
-  const textContent = stripHtmlTags(rawContent);
-
-  return {
-    content: textContent,
-  };
-}
-
-/**
- * Strips HTML tags from content and extracts text
- * Focuses on content within the body tag and removes scripts, styles, etc.
- */
-function stripHtmlTags(html: string): string {
-  // Extract body content if possible
-  const bodyMatch = html.match(/<body[^>]*>([\s\S]*?)<\/body>/i);
-  const bodyContent = bodyMatch ? bodyMatch[1] : html;
-
-  // Remove script and style tags and their contents
-  let cleanedContent = bodyContent
-    .replace(/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi, '')
-    .replace(/<style\b[^<]*(?:(?!<\/style>)<[^<]*)*<\/style>/gi, '');
-
-  // Remove HTML tags but preserve line breaks
-  cleanedContent = cleanedContent
-    .replace(/<br\s*\/?>/gi, '\n')
-    .replace(/<\/p>/gi, '\n\n')
-    .replace(/<\/div>/gi, '\n')
-    .replace(/<\/h[1-6]>/gi, '\n\n')
-    .replace(/<\/li>/gi, '\n')
-    .replace(/<[^>]*>/g, '');
-
-  // Decode HTML entities
-  cleanedContent = cleanedContent
-    .replace(/&nbsp;/g, ' ')
-    .replace(/&amp;/g, '&')
-    .replace(/&lt;/g, '<')
-    .replace(/&gt;/g, '>')
-    .replace(/&quot;/g, '"')
-    .replace(/&#39;/g, "'");
-
-  // Remove excessive whitespace
-  cleanedContent = cleanedContent
-    .replace(/\n\s*\n\s*\n/g, '\n\n')
-    .replace(/\s+/g, ' ')
-    .trim();
-
-  return cleanedContent;
-}
diff --git a/examples/playground/supabase/functions/_tasks/summarizeWithAI.ts b/examples/playground/supabase/functions/_tasks/summarizeWithAI.ts
deleted file mode 100644
index d288aaccd..000000000
--- a/examples/playground/supabase/functions/_tasks/summarizeWithAI.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import Groq from 'groq-sdk';
-
-let _groq: Groq | undefined;
-
-function getGroq() {
-  if (!_groq) {
-    _groq = new Groq({
-      apiKey: Deno.env.get('GROQ_API_KEY'),
-    });
-  }
-
-  return _groq;
-}
-
-export default async (content: string) => {
-  const chatCompletion = await getGroq().chat.completions.create({
-    messages: [
-      {
-        role: 'user',
-        content: `Please provide a concise summary of the following content:\n\n${content}`,
-      },
-    ],
-    model: 'meta-llama/llama-4-scout-17b-16e-instruct',
-  });
-
-  return (
-    chatCompletion.choices[0].message.content ??
-    'Summary not available, please try again.'
-  );
-};
diff --git a/examples/playground/supabase/functions/analyze_website_worker_0/index.ts b/examples/playground/supabase/functions/analyze_website_worker_0/index.ts
deleted file mode 100644
index 07b86d1e9..000000000
--- a/examples/playground/supabase/functions/analyze_website_worker_0/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { EdgeWorker } from '@pgflow/edge-worker';
-import AnalyzeWebsite from '../_flows/analyze_website.ts';
-
-EdgeWorker.start(AnalyzeWebsite, { maxPollSeconds: 5 });
diff --git a/examples/playground/supabase/functions/analyze_website_worker_1/index.ts b/examples/playground/supabase/functions/analyze_website_worker_1/index.ts
deleted file mode 100644
index 07b86d1e9..000000000
--- a/examples/playground/supabase/functions/analyze_website_worker_1/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { EdgeWorker } from '@pgflow/edge-worker';
-import AnalyzeWebsite from '../_flows/analyze_website.ts';
-
-EdgeWorker.start(AnalyzeWebsite, { maxPollSeconds: 5 });
diff --git a/examples/playground/supabase/functions/analyze_website_worker_2/index.ts b/examples/playground/supabase/functions/analyze_website_worker_2/index.ts
deleted file mode 100644
index 07b86d1e9..000000000
--- a/examples/playground/supabase/functions/analyze_website_worker_2/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { EdgeWorker } from '@pgflow/edge-worker';
-import AnalyzeWebsite from '../_flows/analyze_website.ts';
-
-EdgeWorker.start(AnalyzeWebsite, { maxPollSeconds: 5 });
diff --git a/examples/playground/supabase/functions/analyze_website_worker_3/index.ts b/examples/playground/supabase/functions/analyze_website_worker_3/index.ts
deleted file mode 100644
index 07b86d1e9..000000000
--- a/examples/playground/supabase/functions/analyze_website_worker_3/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { EdgeWorker } from 
'@pgflow/edge-worker'; -import AnalyzeWebsite from '../_flows/analyze_website.ts'; - -EdgeWorker.start(AnalyzeWebsite, { maxPollSeconds: 5 }); diff --git a/examples/playground/supabase/functions/database-types.d.ts b/examples/playground/supabase/functions/database-types.d.ts deleted file mode 100644 index a5d1921c1..000000000 --- a/examples/playground/supabase/functions/database-types.d.ts +++ /dev/null @@ -1,657 +0,0 @@ -export type Json = - | string - | number - | boolean - | null - | { [key: string]: Json | undefined } - | Json[] - -export type Database = { - pgflow: { - Tables: { - deps: { - Row: { - created_at: string - dep_slug: string - flow_slug: string - step_slug: string - } - Insert: { - created_at?: string - dep_slug: string - flow_slug: string - step_slug: string - } - Update: { - created_at?: string - dep_slug?: string - flow_slug?: string - step_slug?: string - } - Relationships: [ - { - foreignKeyName: "deps_flow_slug_dep_slug_fkey" - columns: ["flow_slug", "dep_slug"] - isOneToOne: false - referencedRelation: "steps" - referencedColumns: ["flow_slug", "step_slug"] - }, - { - foreignKeyName: "deps_flow_slug_fkey" - columns: ["flow_slug"] - isOneToOne: false - referencedRelation: "flows" - referencedColumns: ["flow_slug"] - }, - { - foreignKeyName: "deps_flow_slug_step_slug_fkey" - columns: ["flow_slug", "step_slug"] - isOneToOne: false - referencedRelation: "steps" - referencedColumns: ["flow_slug", "step_slug"] - }, - ] - } - flows: { - Row: { - created_at: string - flow_slug: string - opt_base_delay: number - opt_max_attempts: number - opt_timeout: number - } - Insert: { - created_at?: string - flow_slug: string - opt_base_delay?: number - opt_max_attempts?: number - opt_timeout?: number - } - Update: { - created_at?: string - flow_slug?: string - opt_base_delay?: number - opt_max_attempts?: number - opt_timeout?: number - } - Relationships: [] - } - runs: { - Row: { - completed_at: string | null - failed_at: string | null - flow_slug: string - input: Json - output: Json | null - remaining_steps: number - run_id: string - started_at: string - status: string - } - Insert: { - completed_at?: string | null - failed_at?: string | null - flow_slug: string - input: Json - output?: Json | null - remaining_steps?: number - run_id?: string - started_at?: string - status?: string - } - Update: { - completed_at?: string | null - failed_at?: string | null - flow_slug?: string - input?: Json - output?: Json | null - remaining_steps?: number - run_id?: string - started_at?: string - status?: string - } - Relationships: [ - { - foreignKeyName: "runs_flow_slug_fkey" - columns: ["flow_slug"] - isOneToOne: false - referencedRelation: "flows" - referencedColumns: ["flow_slug"] - }, - ] - } - step_states: { - Row: { - completed_at: string | null - created_at: string - failed_at: string | null - flow_slug: string - remaining_deps: number - remaining_tasks: number - run_id: string - started_at: string | null - status: string - step_slug: string - } - Insert: { - completed_at?: string | null - created_at?: string - failed_at?: string | null - flow_slug: string - remaining_deps?: number - remaining_tasks?: number - run_id: string - started_at?: string | null - status?: string - step_slug: string - } - Update: { - completed_at?: string | null - created_at?: string - failed_at?: string | null - flow_slug?: string - remaining_deps?: number - remaining_tasks?: number - run_id?: string - started_at?: string | null - status?: string - step_slug?: string - } - Relationships: [ - { - foreignKeyName: 
"step_states_flow_slug_fkey" - columns: ["flow_slug"] - isOneToOne: false - referencedRelation: "flows" - referencedColumns: ["flow_slug"] - }, - { - foreignKeyName: "step_states_flow_slug_step_slug_fkey" - columns: ["flow_slug", "step_slug"] - isOneToOne: false - referencedRelation: "steps" - referencedColumns: ["flow_slug", "step_slug"] - }, - { - foreignKeyName: "step_states_run_id_fkey" - columns: ["run_id"] - isOneToOne: false - referencedRelation: "runs" - referencedColumns: ["run_id"] - }, - ] - } - step_tasks: { - Row: { - attempts_count: number - completed_at: string | null - error_message: string | null - failed_at: string | null - flow_slug: string - message_id: number | null - output: Json | null - queued_at: string - run_id: string - status: string - step_slug: string - task_index: number - } - Insert: { - attempts_count?: number - completed_at?: string | null - error_message?: string | null - failed_at?: string | null - flow_slug: string - message_id?: number | null - output?: Json | null - queued_at?: string - run_id: string - status?: string - step_slug: string - task_index?: number - } - Update: { - attempts_count?: number - completed_at?: string | null - error_message?: string | null - failed_at?: string | null - flow_slug?: string - message_id?: number | null - output?: Json | null - queued_at?: string - run_id?: string - status?: string - step_slug?: string - task_index?: number - } - Relationships: [ - { - foreignKeyName: "step_tasks_flow_slug_fkey" - columns: ["flow_slug"] - isOneToOne: false - referencedRelation: "flows" - referencedColumns: ["flow_slug"] - }, - { - foreignKeyName: "step_tasks_run_id_fkey" - columns: ["run_id"] - isOneToOne: false - referencedRelation: "runs" - referencedColumns: ["run_id"] - }, - { - foreignKeyName: "step_tasks_run_id_step_slug_fkey" - columns: ["run_id", "step_slug"] - isOneToOne: false - referencedRelation: "step_states" - referencedColumns: ["run_id", "step_slug"] - }, - ] - } - steps: { - Row: { - created_at: string - deps_count: number - flow_slug: string - opt_base_delay: number | null - opt_max_attempts: number | null - opt_timeout: number | null - step_index: number - step_slug: string - step_type: string - } - Insert: { - created_at?: string - deps_count?: number - flow_slug: string - opt_base_delay?: number | null - opt_max_attempts?: number | null - opt_timeout?: number | null - step_index?: number - step_slug: string - step_type?: string - } - Update: { - created_at?: string - deps_count?: number - flow_slug?: string - opt_base_delay?: number | null - opt_max_attempts?: number | null - opt_timeout?: number | null - step_index?: number - step_slug?: string - step_type?: string - } - Relationships: [ - { - foreignKeyName: "steps_flow_slug_fkey" - columns: ["flow_slug"] - isOneToOne: false - referencedRelation: "flows" - referencedColumns: ["flow_slug"] - }, - ] - } - workers: { - Row: { - function_name: string - last_heartbeat_at: string - queue_name: string - started_at: string - stopped_at: string | null - worker_id: string - } - Insert: { - function_name: string - last_heartbeat_at?: string - queue_name: string - started_at?: string - stopped_at?: string | null - worker_id: string - } - Update: { - function_name?: string - last_heartbeat_at?: string - queue_name?: string - started_at?: string - stopped_at?: string | null - worker_id?: string - } - Relationships: [] - } - } - Views: { - [_ in never]: never - } - Functions: { - add_step: { - Args: - | { - flow_slug: string - step_slug: string - deps_slugs: string[] - 
max_attempts?: number - base_delay?: number - timeout?: number - } - | { - flow_slug: string - step_slug: string - max_attempts?: number - base_delay?: number - timeout?: number - } - Returns: { - created_at: string - deps_count: number - flow_slug: string - opt_base_delay: number | null - opt_max_attempts: number | null - opt_timeout: number | null - step_index: number - step_slug: string - step_type: string - } - } - calculate_retry_delay: { - Args: { base_delay: number; attempts_count: number } - Returns: number - } - complete_task: { - Args: { - run_id: string - step_slug: string - task_index: number - output: Json - } - Returns: { - attempts_count: number - completed_at: string | null - error_message: string | null - failed_at: string | null - flow_slug: string - message_id: number | null - output: Json | null - queued_at: string - run_id: string - status: string - step_slug: string - task_index: number - }[] - } - create_flow: { - Args: { - flow_slug: string - max_attempts?: number - base_delay?: number - timeout?: number - } - Returns: { - created_at: string - flow_slug: string - opt_base_delay: number - opt_max_attempts: number - opt_timeout: number - } - } - fail_task: { - Args: { - run_id: string - step_slug: string - task_index: number - error_message: string - } - Returns: { - attempts_count: number - completed_at: string | null - error_message: string | null - failed_at: string | null - flow_slug: string - message_id: number | null - output: Json | null - queued_at: string - run_id: string - status: string - step_slug: string - task_index: number - }[] - } - is_valid_slug: { - Args: { slug: string } - Returns: boolean - } - maybe_complete_run: { - Args: { run_id: string } - Returns: undefined - } - poll_for_tasks: { - Args: { - queue_name: string - vt: number - qty: number - max_poll_seconds?: number - poll_interval_ms?: number - } - Returns: Database["pgflow"]["CompositeTypes"]["step_task_record"][] - } - read_with_poll: { - Args: { - queue_name: string - vt: number - qty: number - max_poll_seconds?: number - poll_interval_ms?: number - conditional?: Json - } - Returns: unknown[] - } - start_flow: { - Args: { flow_slug: string; input: Json } - Returns: { - completed_at: string | null - failed_at: string | null - flow_slug: string - input: Json - output: Json | null - remaining_steps: number - run_id: string - started_at: string - status: string - }[] - } - start_ready_steps: { - Args: { run_id: string } - Returns: undefined - } - } - Enums: { - [_ in never]: never - } - CompositeTypes: { - step_task_record: { - flow_slug: string | null - run_id: string | null - step_slug: string | null - input: Json | null - msg_id: number | null - } - } - } - public: { - Tables: { - websites: { - Row: { - created_at: string - id: number - sentiment: number - summary: string - tags: string[] - updated_at: string - user_id: string - website_url: string - } - Insert: { - created_at?: string - id?: number - sentiment: number - summary: string - tags?: string[] - updated_at?: string - user_id: string - website_url: string - } - Update: { - created_at?: string - id?: number - sentiment?: number - summary?: string - tags?: string[] - updated_at?: string - user_id?: string - website_url?: string - } - Relationships: [] - } - } - Views: { - [_ in never]: never - } - Functions: { - start_analyze_website_flow: { - Args: { url: string } - Returns: { - completed_at: string | null - failed_at: string | null - flow_slug: string - input: Json - output: Json | null - remaining_steps: number - run_id: string 
-          started_at: string
-          status: string
-        }
-      }
-    }
-    Enums: {
-      [_ in never]: never
-    }
-    CompositeTypes: {
-      [_ in never]: never
-    }
-  }
-}
-
-type DefaultSchema = Database[Extract<keyof Database, "public">]
-
-export type Tables<
-  DefaultSchemaTableNameOrOptions extends
-    | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"])
-    | { schema: keyof Database },
-  TableName extends DefaultSchemaTableNameOrOptions extends {
-    schema: keyof Database
-  }
-    ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] &
-        Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])
-    : never = never,
-> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
-  ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] &
-      Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends {
-      Row: infer R
-    }
-    ? R
-    : never
-  : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] &
-        DefaultSchema["Views"])
-    ? (DefaultSchema["Tables"] &
-        DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends {
-        Row: infer R
-      }
-      ? R
-      : never
-    : never
-
-export type TablesInsert<
-  DefaultSchemaTableNameOrOptions extends
-    | keyof DefaultSchema["Tables"]
-    | { schema: keyof Database },
-  TableName extends DefaultSchemaTableNameOrOptions extends {
-    schema: keyof Database
-  }
-    ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"]
-    : never = never,
-> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
-  ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends {
-      Insert: infer I
-    }
-    ? I
-    : never
-  : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"]
-    ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends {
-        Insert: infer I
-      }
-      ? I
-      : never
-    : never
-
-export type TablesUpdate<
-  DefaultSchemaTableNameOrOptions extends
-    | keyof DefaultSchema["Tables"]
-    | { schema: keyof Database },
-  TableName extends DefaultSchemaTableNameOrOptions extends {
-    schema: keyof Database
-  }
-    ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"]
-    : never = never,
-> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
-  ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends {
-      Update: infer U
-    }
-    ? U
-    : never
-  : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"]
-    ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends {
-        Update: infer U
-      }
-      ? U
-      : never
-    : never
-
-export type Enums<
-  DefaultSchemaEnumNameOrOptions extends
-    | keyof DefaultSchema["Enums"]
-    | { schema: keyof Database },
-  EnumName extends DefaultSchemaEnumNameOrOptions extends {
-    schema: keyof Database
-  }
-    ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"]
-    : never = never,
-> = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database }
-  ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName]
-  : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"]
-    ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions]
-    : never
-
-export type CompositeTypes<
-  PublicCompositeTypeNameOrOptions extends
-    | keyof DefaultSchema["CompositeTypes"]
-    | { schema: keyof Database },
-  CompositeTypeName extends PublicCompositeTypeNameOrOptions extends {
-    schema: keyof Database
-  }
-    ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"]
-    : never = never,
-> = PublicCompositeTypeNameOrOptions extends { schema: keyof Database }
-  ? 
Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] - : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] - ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] - : never - -export const Constants = { - pgflow: { - Enums: {}, - }, - public: { - Enums: {}, - }, -} as const - diff --git a/examples/playground/supabase/functions/deno.json b/examples/playground/supabase/functions/deno.json deleted file mode 100644 index 04121074e..000000000 --- a/examples/playground/supabase/functions/deno.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "imports": { - "@pgflow/edge-worker": "jsr:@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921", - "@pgflow/edge-worker/_internal": "jsr:@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921/_internal", - "@pgflow/dsl": "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921", - "@pgflow/dsl/supabase": "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921/supabase", - "@pgflow/core": "npm:@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921", - "@henrygd/queue": "jsr:@henrygd/queue@^1.0.7", - "@supabase/supabase-js": "jsr:@supabase/supabase-js@^2.49.4", - "groq-sdk": "npm:groq-sdk@^0.20.1", - "postgres": "npm:postgres@3.4.5", - "sanitize-html": "npm:sanitize-html@^2.16.0", - "turndown": "npm:turndown@^7.2.0" - } -} diff --git a/examples/playground/supabase/functions/deno.lock b/examples/playground/supabase/functions/deno.lock deleted file mode 100644 index 8f77d40ca..000000000 --- a/examples/playground/supabase/functions/deno.lock +++ /dev/null @@ -1,623 +0,0 @@ -{ - "version": "3", - "packages": { - "specifiers": { - "jsr:@henrygd/queue@^1.0.7": "jsr:@henrygd/queue@1.0.7", - "jsr:@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921": "jsr:@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921", - "jsr:@supabase/supabase-js@^2.49.4": "jsr:@supabase/supabase-js@2.58.0", - "npm:@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921": "npm:@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921": "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:@supabase/auth-js@2.69.1": "npm:@supabase/auth-js@2.69.1", - "npm:@supabase/auth-js@2.70.0": "npm:@supabase/auth-js@2.70.0", - "npm:@supabase/auth-js@2.72.0": "npm:@supabase/auth-js@2.72.0", - "npm:@supabase/functions-js@2.4.4": "npm:@supabase/functions-js@2.4.4", - "npm:@supabase/functions-js@2.5.0": "npm:@supabase/functions-js@2.5.0", - "npm:@supabase/node-fetch@2.6.15": "npm:@supabase/node-fetch@2.6.15", - "npm:@supabase/postgrest-js@1.19.4": "npm:@supabase/postgrest-js@1.19.4", - "npm:@supabase/postgrest-js@1.21.4": "npm:@supabase/postgrest-js@1.21.4", - "npm:@supabase/realtime-js@2.11.10": "npm:@supabase/realtime-js@2.11.10", - "npm:@supabase/realtime-js@2.11.15": "npm:@supabase/realtime-js@2.11.15_ws@8.18.2", - "npm:@supabase/realtime-js@2.11.2": "npm:@supabase/realtime-js@2.11.2", - "npm:@supabase/realtime-js@2.15.5": "npm:@supabase/realtime-js@2.15.5", - "npm:@supabase/storage-js@2.12.2": "npm:@supabase/storage-js@2.12.2", - "npm:@supabase/storage-js@2.7.1": "npm:@supabase/storage-js@2.7.1", - "npm:@supabase/supabase-js@^2.47.10": "npm:@supabase/supabase-js@2.74.0", - "npm:groq-sdk@^0.20.1": "npm:groq-sdk@0.20.1", - "npm:postgres@3.4.5": "npm:postgres@3.4.5", - "npm:sanitize-html": "npm:sanitize-html@2.16.0", - "npm:turndown": "npm:turndown@7.2.0" - }, - "jsr": { - 
"@henrygd/queue@1.0.7": { - "integrity": "98cade132744bb420957c5413393f76eb8ba7261826f026c8a89a562b8fa2961" - }, - "@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921": { - "integrity": "1aabede3ce7d770911287274ac09ef86502c3484e8cb989d55f83d6afd5419b3", - "dependencies": [ - "jsr:@henrygd/queue@^1.0.7", - "npm:@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:@supabase/supabase-js@^2.47.10", - "npm:postgres@3.4.5" - ] - }, - "@pgflow/edge-worker@0.4.3": { - "integrity": "914f3ce2d46d97be7c314face183ebbd1f5a9d85f6edfc2019cee37d2ed5d6c9", - "dependencies": [ - "jsr:@henrygd/queue@^1.0.7", - "npm:@pgflow/core@0.4.3", - "npm:@pgflow/dsl@0.4.3", - "npm:postgres@3.4.5" - ] - }, - "@supabase/supabase-js@2.49.4": { - "integrity": "4b785f9cd4a62feb7b3f84606bb923a4ea51e3e000eafff0972bc779240b7592", - "dependencies": [ - "npm:@supabase/auth-js@2.69.1", - "npm:@supabase/functions-js@2.4.4", - "npm:@supabase/node-fetch@2.6.15", - "npm:@supabase/postgrest-js@1.19.4", - "npm:@supabase/realtime-js@2.11.2", - "npm:@supabase/storage-js@2.7.1" - ] - }, - "@supabase/supabase-js@2.50.0": { - "integrity": "d583ced1777f6bda163de3aca49f9b00b62c48633c5d129492531a11b3542457", - "dependencies": [ - "npm:@supabase/auth-js@2.70.0", - "npm:@supabase/functions-js@2.4.4", - "npm:@supabase/node-fetch@2.6.15", - "npm:@supabase/postgrest-js@1.19.4", - "npm:@supabase/realtime-js@2.11.10", - "npm:@supabase/storage-js@2.7.1" - ] - }, - "@supabase/supabase-js@2.50.2": { - "integrity": "d7fc817c78310906df97c03121b184ac7dcc083f893ce61e057e02312c6638c3", - "dependencies": [ - "npm:@supabase/auth-js@2.70.0", - "npm:@supabase/functions-js@2.4.4", - "npm:@supabase/node-fetch@2.6.15", - "npm:@supabase/postgrest-js@1.19.4", - "npm:@supabase/realtime-js@2.11.15", - "npm:@supabase/storage-js@2.7.1" - ] - }, - "@supabase/supabase-js@2.58.0": { - "integrity": "4d04e72e9f632b451ac7d1a84de0b85249c0097fdf06253f371c1f0a23e62c87", - "dependencies": [ - "npm:@supabase/auth-js@2.72.0", - "npm:@supabase/functions-js@2.5.0", - "npm:@supabase/node-fetch@2.6.15", - "npm:@supabase/postgrest-js@1.21.4", - "npm:@supabase/realtime-js@2.15.5", - "npm:@supabase/storage-js@2.12.2" - ] - } - }, - "npm": { - "@mixmark-io/domino@2.2.0": { - "integrity": "sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw==", - "dependencies": {} - }, - "@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921": { - "integrity": "sha512-2E/HJSp7QHCtlNHHTlwbp/f8gVM6kQba3u2tFghvnJpqsQxE0sEzQwpZzO/OcgjmpmuuFYxroTviBjilFif/kw==", - "dependencies": { - "@pgflow/dsl": "@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921", - "postgres": "postgres@3.4.5" - } - }, - "@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921": { - "integrity": "sha512-0Ph2n6tiia4zDz/DT8Eo1nLiJC+A5ZZaKe9hmG2bEgAT1qNKnq9osP8ka2Ds3DxVMJR6N+fou8BE+SOv9D+FAA==", - "dependencies": {} - }, - "@supabase/auth-js@2.69.1": { - "integrity": "sha512-FILtt5WjCNzmReeRLq5wRs3iShwmnWgBvxHfqapC/VoljJl+W8hDAyFmf1NVw3zH+ZjZ05AKxiKxVeb0HNWRMQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/auth-js@2.70.0": { - "integrity": "sha512-BaAK/tOAZFJtzF1sE3gJ2FwTjLf4ky3PSvcvLGEgEmO4BSBkwWKu8l67rLLIBZPDnCyV7Owk2uPyKHa0kj5QGg==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/auth-js@2.72.0": { - "integrity": 
"sha512-4+bnUrtTDK1YD0/FCx2YtMiQH5FGu9Jlf4IQi5kcqRwRwqp2ey39V61nHNdH86jm3DIzz0aZKiWfTW8qXk1swQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/auth-js@2.74.0": { - "integrity": "sha512-EJYDxYhBCOS40VJvfQ5zSjo8Ku7JbTICLTcmXt4xHMQZt4IumpRfHg11exXI9uZ6G7fhsQlNgbzDhFN4Ni9NnA==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/functions-js@2.4.4": { - "integrity": "sha512-WL2p6r4AXNGwop7iwvul2BvOtuJ1YQy8EbOd0dhG1oN1q8el/BIRSFCFnWAMM/vJJlHWLi4ad22sKbKr9mvjoA==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/functions-js@2.5.0": { - "integrity": "sha512-SXBx6Jvp+MOBekeKFu+G11YLYPeVeGQl23eYyAG9+Ro0pQ1aIP0UZNIBxHKNHqxzR0L0n6gysNr2KT3841NATw==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/functions-js@2.74.0": { - "integrity": "sha512-VqWYa981t7xtIFVf7LRb9meklHckbH/tqwaML5P3LgvlaZHpoSPjMCNLcquuLYiJLxnh2rio7IxLh+VlvRvSWw==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/node-fetch@2.6.15": { - "integrity": "sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==", - "dependencies": { - "whatwg-url": "whatwg-url@5.0.0" - } - }, - "@supabase/postgrest-js@1.19.4": { - "integrity": "sha512-O4soKqKtZIW3olqmbXXbKugUtByD2jPa8kL2m2c1oozAO11uCcGrRhkZL0kVxjBLrXHE0mdSkFsMj7jDSfyNpw==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/postgrest-js@1.21.4": { - "integrity": "sha512-TxZCIjxk6/dP9abAi89VQbWWMBbybpGWyvmIzTd79OeravM13OjR/YEYeyUOPcM1C3QyvXkvPZhUfItvmhY1IQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/postgrest-js@2.74.0": { - "integrity": "sha512-9Ypa2eS0Ib/YQClE+BhDSjx7OKjYEF6LAGjTB8X4HucdboGEwR0LZKctNfw6V0PPIAVjjzZxIlNBXGv0ypIkHw==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/realtime-js@2.11.10": { - "integrity": "sha512-SJKVa7EejnuyfImrbzx+HaD9i6T784khuw1zP+MBD7BmJYChegGxYigPzkKX8CK8nGuDntmeSD3fvriaH0EGZA==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@types/phoenix": "@types/phoenix@1.6.6", - "@types/ws": "@types/ws@8.18.1", - "ws": "ws@8.18.2" - } - }, - "@supabase/realtime-js@2.11.15_ws@8.18.2": { - "integrity": "sha512-HQKRnwAqdVqJW/P9TjKVK+/ETpW4yQ8tyDPPtRMKOH4Uh3vQD74vmj353CYs8+YwVBKubeUOOEpI9CT8mT4obw==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@types/phoenix": "@types/phoenix@1.6.6", - "@types/ws": "@types/ws@8.18.1", - "isows": "isows@1.0.7_ws@8.18.2", - "ws": "ws@8.18.2" - } - }, - "@supabase/realtime-js@2.11.2": { - "integrity": "sha512-u/XeuL2Y0QEhXSoIPZZwR6wMXgB+RQbJzG9VErA3VghVt7uRfSVsjeqd7m5GhX3JR6dM/WRmLbVR8URpDWG4+w==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@types/phoenix": "@types/phoenix@1.6.6", - "@types/ws": "@types/ws@8.18.1", - "ws": "ws@8.18.1" - } - }, - "@supabase/realtime-js@2.15.5": { - "integrity": "sha512-/Rs5Vqu9jejRD8ZeuaWXebdkH+J7V6VySbCZ/zQM93Ta5y3mAmocjioa/nzlB6qvFmyylUgKVS1KpE212t30OA==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@types/phoenix": "@types/phoenix@1.6.6", - "@types/ws": "@types/ws@8.18.1", - "ws": "ws@8.18.2" - } - }, - "@supabase/realtime-js@2.74.0": { - "integrity": 
"sha512-K5VqpA4/7RO1u1nyD5ICFKzWKu58bIDcPxHY0aFA7MyWkFd0pzi/XYXeoSsAifnD9p72gPIpgxVXCQZKJg1ktQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@types/phoenix": "@types/phoenix@1.6.6", - "@types/ws": "@types/ws@8.18.1", - "ws": "ws@8.18.2" - } - }, - "@supabase/storage-js@2.12.2": { - "integrity": "sha512-SiySHxi3q7gia7NBYpsYRu8gyI0NhFwSORMxbZIxJ/zAVkN6QpwDRan158CJ+UdzD4WB/rQMAGRqIJQP+7ccAQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/storage-js@2.7.1": { - "integrity": "sha512-asYHcyDR1fKqrMpytAS1zjyEfvxuOIp1CIXX7ji4lHHcJKqyk+sLl/Vxgm4sN6u8zvuUtae9e4kDxQP2qrwWBA==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/storage-js@2.74.0": { - "integrity": "sha512-o0cTQdMqHh4ERDLtjUp1/KGPbQoNwKRxUh6f8+KQyjC5DSmiw/r+jgFe/WHh067aW+WU8nA9Ytw9ag7OhzxEkQ==", - "dependencies": { - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15" - } - }, - "@supabase/supabase-js@2.74.0": { - "integrity": "sha512-IEMM/V6gKdP+N/X31KDIczVzghDpiPWFGLNjS8Rus71KvV6y6ueLrrE/JGCHDrU+9pq5copF3iCa0YQh+9Lq9Q==", - "dependencies": { - "@supabase/auth-js": "@supabase/auth-js@2.74.0", - "@supabase/functions-js": "@supabase/functions-js@2.74.0", - "@supabase/node-fetch": "@supabase/node-fetch@2.6.15", - "@supabase/postgrest-js": "@supabase/postgrest-js@2.74.0", - "@supabase/realtime-js": "@supabase/realtime-js@2.74.0", - "@supabase/storage-js": "@supabase/storage-js@2.74.0" - } - }, - "@types/node-fetch@2.6.12": { - "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", - "dependencies": { - "@types/node": "@types/node@18.16.19", - "form-data": "form-data@4.0.2" - } - }, - "@types/node@18.16.19": { - "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==", - "dependencies": {} - }, - "@types/node@18.19.87": { - "integrity": "sha512-OIAAu6ypnVZHmsHCeJ+7CCSub38QNBS9uceMQeg7K5Ur0Jr+wG9wEOEvvMbhp09pxD5czIUy/jND7s7Tb6Nw7A==", - "dependencies": { - "undici-types": "undici-types@5.26.5" - } - }, - "@types/phoenix@1.6.6": { - "integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==", - "dependencies": {} - }, - "@types/ws@8.18.1": { - "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", - "dependencies": { - "@types/node": "@types/node@18.16.19" - } - }, - "abort-controller@3.0.0": { - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dependencies": { - "event-target-shim": "event-target-shim@5.0.1" - } - }, - "agentkeepalive@4.6.0": { - "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", - "dependencies": { - "humanize-ms": "humanize-ms@1.2.1" - } - }, - "asynckit@0.4.0": { - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dependencies": {} - }, - "call-bind-apply-helpers@1.0.2": { - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dependencies": { - "es-errors": "es-errors@1.3.0", - "function-bind": "function-bind@1.1.2" - } - }, - "combined-stream@1.0.8": { - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dependencies": { - "delayed-stream": 
"delayed-stream@1.0.0" - } - }, - "deepmerge@4.3.1": { - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "dependencies": {} - }, - "delayed-stream@1.0.0": { - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dependencies": {} - }, - "dom-serializer@2.0.0": { - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "dependencies": { - "domelementtype": "domelementtype@2.3.0", - "domhandler": "domhandler@5.0.3", - "entities": "entities@4.5.0" - } - }, - "domelementtype@2.3.0": { - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dependencies": {} - }, - "domhandler@5.0.3": { - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "dependencies": { - "domelementtype": "domelementtype@2.3.0" - } - }, - "domutils@3.2.2": { - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "dependencies": { - "dom-serializer": "dom-serializer@2.0.0", - "domelementtype": "domelementtype@2.3.0", - "domhandler": "domhandler@5.0.3" - } - }, - "dunder-proto@1.0.1": { - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dependencies": { - "call-bind-apply-helpers": "call-bind-apply-helpers@1.0.2", - "es-errors": "es-errors@1.3.0", - "gopd": "gopd@1.2.0" - } - }, - "entities@4.5.0": { - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dependencies": {} - }, - "es-define-property@1.0.1": { - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dependencies": {} - }, - "es-errors@1.3.0": { - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dependencies": {} - }, - "es-object-atoms@1.1.1": { - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dependencies": { - "es-errors": "es-errors@1.3.0" - } - }, - "es-set-tostringtag@2.1.0": { - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "dependencies": { - "es-errors": "es-errors@1.3.0", - "get-intrinsic": "get-intrinsic@1.3.0", - "has-tostringtag": "has-tostringtag@1.0.2", - "hasown": "hasown@2.0.2" - } - }, - "escape-string-regexp@4.0.0": { - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dependencies": {} - }, - "event-target-shim@5.0.1": { - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dependencies": {} - }, - "form-data-encoder@1.7.2": { - "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", - "dependencies": {} - }, - "form-data@4.0.2": { - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", - "dependencies": { - "asynckit": "asynckit@0.4.0", - "combined-stream": "combined-stream@1.0.8", - "es-set-tostringtag": "es-set-tostringtag@2.1.0", - "mime-types": "mime-types@2.1.35" - } - }, - "formdata-node@4.4.1": { - "integrity": 
"sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", - "dependencies": { - "node-domexception": "node-domexception@1.0.0", - "web-streams-polyfill": "web-streams-polyfill@4.0.0-beta.3" - } - }, - "function-bind@1.1.2": { - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dependencies": {} - }, - "get-intrinsic@1.3.0": { - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dependencies": { - "call-bind-apply-helpers": "call-bind-apply-helpers@1.0.2", - "es-define-property": "es-define-property@1.0.1", - "es-errors": "es-errors@1.3.0", - "es-object-atoms": "es-object-atoms@1.1.1", - "function-bind": "function-bind@1.1.2", - "get-proto": "get-proto@1.0.1", - "gopd": "gopd@1.2.0", - "has-symbols": "has-symbols@1.1.0", - "hasown": "hasown@2.0.2", - "math-intrinsics": "math-intrinsics@1.1.0" - } - }, - "get-proto@1.0.1": { - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dependencies": { - "dunder-proto": "dunder-proto@1.0.1", - "es-object-atoms": "es-object-atoms@1.1.1" - } - }, - "gopd@1.2.0": { - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dependencies": {} - }, - "groq-sdk@0.20.1": { - "integrity": "sha512-I/U7mHDcanKHR/P0oKSSS0M6oHR69G1QgtMplqmF3gSejJ5ihV7l+/0OqbNoqOzYoQKG4XH7O4zCqMoTKCztQQ==", - "dependencies": { - "@types/node": "@types/node@18.19.87", - "@types/node-fetch": "@types/node-fetch@2.6.12", - "abort-controller": "abort-controller@3.0.0", - "agentkeepalive": "agentkeepalive@4.6.0", - "form-data-encoder": "form-data-encoder@1.7.2", - "formdata-node": "formdata-node@4.4.1", - "node-fetch": "node-fetch@2.7.0" - } - }, - "has-symbols@1.1.0": { - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dependencies": {} - }, - "has-tostringtag@1.0.2": { - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dependencies": { - "has-symbols": "has-symbols@1.1.0" - } - }, - "hasown@2.0.2": { - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dependencies": { - "function-bind": "function-bind@1.1.2" - } - }, - "htmlparser2@8.0.2": { - "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", - "dependencies": { - "domelementtype": "domelementtype@2.3.0", - "domhandler": "domhandler@5.0.3", - "domutils": "domutils@3.2.2", - "entities": "entities@4.5.0" - } - }, - "humanize-ms@1.2.1": { - "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "dependencies": { - "ms": "ms@2.1.3" - } - }, - "is-plain-object@5.0.0": { - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "dependencies": {} - }, - "isows@1.0.7_ws@8.18.2": { - "integrity": "sha512-I1fSfDCZL5P0v33sVqeTDSpcstAg/N+wF5HS033mogOVIp4B+oHC7oOCsA3axAbBSGTJ8QubbNmnIRN/h8U7hg==", - "dependencies": { - "ws": "ws@8.18.2" - } - }, - "math-intrinsics@1.1.0": { - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dependencies": {} - }, - "mime-db@1.52.0": { - "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dependencies": {} - }, - "mime-types@2.1.35": { - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dependencies": { - "mime-db": "mime-db@1.52.0" - } - }, - "ms@2.1.3": { - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dependencies": {} - }, - "nanoid@3.3.11": { - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dependencies": {} - }, - "node-domexception@1.0.0": { - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "dependencies": {} - }, - "node-fetch@2.7.0": { - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dependencies": { - "whatwg-url": "whatwg-url@5.0.0" - } - }, - "parse-srcset@1.0.2": { - "integrity": "sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q==", - "dependencies": {} - }, - "picocolors@1.1.1": { - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dependencies": {} - }, - "postcss@8.5.3": { - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", - "dependencies": { - "nanoid": "nanoid@3.3.11", - "picocolors": "picocolors@1.1.1", - "source-map-js": "source-map-js@1.2.1" - } - }, - "postgres@3.4.5": { - "integrity": "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg==", - "dependencies": {} - }, - "sanitize-html@2.16.0": { - "integrity": "sha512-0s4caLuHHaZFVxFTG74oW91+j6vW7gKbGD6CD2+miP73CE6z6YtOBN0ArtLd2UGyi4IC7K47v3ENUbQX4jV3Mg==", - "dependencies": { - "deepmerge": "deepmerge@4.3.1", - "escape-string-regexp": "escape-string-regexp@4.0.0", - "htmlparser2": "htmlparser2@8.0.2", - "is-plain-object": "is-plain-object@5.0.0", - "parse-srcset": "parse-srcset@1.0.2", - "postcss": "postcss@8.5.3" - } - }, - "source-map-js@1.2.1": { - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dependencies": {} - }, - "tr46@0.0.3": { - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dependencies": {} - }, - "turndown@7.2.0": { - "integrity": "sha512-eCZGBN4nNNqM9Owkv9HAtWRYfLA4h909E/WGAWWBpmB275ehNhZyk87/Tpvjbp0jjNl9XwCsbe6bm6CqFsgD+A==", - "dependencies": { - "@mixmark-io/domino": "@mixmark-io/domino@2.2.0" - } - }, - "undici-types@5.26.5": { - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dependencies": {} - }, - "web-streams-polyfill@4.0.0-beta.3": { - "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", - "dependencies": {} - }, - "webidl-conversions@3.0.1": { - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dependencies": {} - }, - "whatwg-url@5.0.0": { - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "tr46@0.0.3", - "webidl-conversions": "webidl-conversions@3.0.1" - } - }, - "ws@8.18.1": { - "integrity": 
"sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", - "dependencies": {} - }, - "ws@8.18.2": { - "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==", - "dependencies": {} - } - } - }, - "remote": { - "https://deno.land/std@0.168.0/async/abortable.ts": "80b2ac399f142cc528f95a037a7d0e653296352d95c681e284533765961de409", - "https://deno.land/std@0.168.0/async/deadline.ts": "2c2deb53c7c28ca1dda7a3ad81e70508b1ebc25db52559de6b8636c9278fd41f", - "https://deno.land/std@0.168.0/async/debounce.ts": "60301ffb37e730cd2d6f9dadfd0ecb2a38857681bd7aaf6b0a106b06e5210a98", - "https://deno.land/std@0.168.0/async/deferred.ts": "77d3f84255c3627f1cc88699d8472b664d7635990d5358c4351623e098e917d6", - "https://deno.land/std@0.168.0/async/delay.ts": "5a9bfba8de38840308a7a33786a0155a7f6c1f7a859558ddcec5fe06e16daf57", - "https://deno.land/std@0.168.0/async/mod.ts": "7809ad4bb223e40f5fdc043e5c7ca04e0e25eed35c32c3c32e28697c553fa6d9", - "https://deno.land/std@0.168.0/async/mux_async_iterator.ts": "770a0ff26c59f8bbbda6b703a2235f04e379f73238e8d66a087edc68c2a2c35f", - "https://deno.land/std@0.168.0/async/pool.ts": "6854d8cd675a74c73391c82005cbbe4cc58183bddcd1fbbd7c2bcda42b61cf69", - "https://deno.land/std@0.168.0/async/retry.ts": "e8e5173623915bbc0ddc537698fa418cf875456c347eda1ed453528645b42e67", - "https://deno.land/std@0.168.0/async/tee.ts": "3a47cc4e9a940904fd4341f0224907e199121c80b831faa5ec2b054c6d2eff5e", - "https://deno.land/std@0.168.0/http/server.ts": "e99c1bee8a3f6571ee4cdeb2966efad465b8f6fe62bec1bdb59c1f007cc4d155" - }, - "workspace": { - "dependencies": [ - "jsr:@henrygd/queue@^1.0.7", - "jsr:@pgflow/edge-worker@0.0.0-array-map-steps-cd94242a-20251008042921", - "jsr:@supabase/supabase-js@^2.49.4", - "npm:@pgflow/core@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:@pgflow/dsl@0.0.0-array-map-steps-cd94242a-20251008042921", - "npm:groq-sdk@^0.20.1", - "npm:postgres@3.4.5", - "npm:sanitize-html@^2.16.0", - "npm:turndown@^7.2.0" - ] - } -} diff --git a/examples/playground/supabase/functions/import_map.local.json b/examples/playground/supabase/functions/import_map.local.json deleted file mode 100644 index 6c63bbe06..000000000 --- a/examples/playground/supabase/functions/import_map.local.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "imports": { - "@pgflow/core": "./_vendor/@pgflow/core/index.js", - "@pgflow/core/": "./_vendor/@pgflow/core/", - "@pgflow/dsl": "./_vendor/@pgflow/dsl/index.js", - "@pgflow/dsl/": "./_vendor/@pgflow/dsl/", - "@pgflow/dsl/supabase": "./_vendor/@pgflow/dsl/supabase.js", - "@pgflow/edge-worker": "./_vendor/@pgflow/edge-worker/index.ts", - "@pgflow/edge-worker/": "./_vendor/@pgflow/edge-worker/", - "@pgflow/edge-worker/_internal": "./_vendor/@pgflow/edge-worker/_internal.ts", - "@henrygd/queue": "jsr:@henrygd/queue@^1.0.7", - "@supabase/supabase-js": "jsr:@supabase/supabase-js@^2.49.4", - "groq-sdk": "npm:groq-sdk@^0.20.1", - "postgres": "npm:postgres@3.4.5", - "sanitize-html": "npm:sanitize-html@^2.16.0", - "turndown": "npm:turndown@^7.2.0" - } -} \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/DEPLOY.md b/examples/playground/supabase/functions/pgflow-cron-worker/DEPLOY.md deleted file mode 100644 index d5466cbbe..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/DEPLOY.md +++ /dev/null @@ -1,149 +0,0 @@ -# Deploying pgflow Cron Worker to Supabase - -This guide explains how to deploy the pgflow cron worker to 
your hosted Supabase project. - -## Prerequisites - -- Supabase CLI installed and configured -- Access to your Supabase project dashboard -- Your project's anon key and URL - -## Step 1: Deploy the Edge Function - -Deploy the pgflow-cron-worker edge function to your Supabase project: - -```bash -supabase functions deploy pgflow-cron-worker -``` - -## Step 2: Set Environment Variables - -In your Supabase dashboard: - -1. Go to Settings → Edge Functions -2. Find `pgflow-cron-worker` -3. Add the following environment variable: - - `EDGE_WORKER_DB_URL`: Your database connection string (use the connection pooler URL from Settings → Database) - -## Step 3: Enable Required Extensions - -Run this SQL in your Supabase SQL editor: - -```sql --- Enable required extensions -CREATE EXTENSION IF NOT EXISTS pg_cron; -CREATE EXTENSION IF NOT EXISTS pg_net; - --- Grant necessary permissions -GRANT USAGE ON SCHEMA cron TO postgres; -GRANT USAGE ON SCHEMA net TO postgres; -``` - -## Step 4: Create the Cron Job - -Replace the placeholders and run this SQL in your Supabase SQL editor: - -```sql --- Remove existing job if it exists -SELECT cron.unschedule(jobname) -FROM cron.job -WHERE jobname = 'pgflow-worker--analyze_website'; - --- Create the cron job --- Replace YOUR_PROJECT_REF with your Supabase project reference (e.g., 'abcdefghijklmnop') --- Replace YOUR_ANON_KEY with your project's anon key -SELECT cron.schedule( - 'pgflow-worker--analyze_website', -- job name - '*/4 * * * * *', -- every 4 seconds - $$ - SELECT net.http_post( - url := 'https://YOUR_PROJECT_REF.supabase.co/functions/v1/pgflow-cron-worker', - headers := jsonb_build_object( - 'Content-Type', 'application/json', - 'Authorization', 'Bearer YOUR_ANON_KEY' - ), - body := jsonb_build_object( - 'flow_slug', 'analyze_website', - 'batch_size', 10, - 'max_concurrent', 5, - 'cron_interval_seconds', 4 - ), - timeout_milliseconds := 25000 - ) as request_id; - $$ -); -``` - -## Step 5: Verify the Deployment - -Check that the cron job is created: - -```sql --- View scheduled jobs -SELECT * FROM cron.job WHERE jobname = 'pgflow-worker--analyze_website'; - --- Check recent job runs (after a minute) --- cron.job_run_details has no jobname column, so resolve the jobid first -SELECT * FROM cron.job_run_details -WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'pgflow-worker--analyze_website') -ORDER BY start_time DESC -LIMIT 10; - --- Monitor HTTP responses (net._http_response does not store the request URL) -SELECT - created, - status_code, - content, - error_msg -FROM net._http_response -ORDER BY created DESC -LIMIT 10; -``` - -## Configuration Parameters - -When creating the cron job, you must provide all these parameters: - -- `flow_slug`: The slug of the flow to process -- `batch_size`: Number of tasks to process in each batch -- `max_concurrent`: Maximum concurrent task executions -- `cron_interval_seconds`: How often the cron runs (should match your cron schedule) - -## Adjusting the Schedule - -The cron schedule uses standard cron syntax. Common examples: - -- `*/4 * * * * *` - Every 4 seconds -- `*/30 * * * * *` - Every 30 seconds -- `* * * * *` - Every minute -- `*/5 * * * *` - Every 5 minutes -- `0 * * * *` - Every hour - -Remember to update `cron_interval_seconds` in the request body to match your schedule. - -## Stopping the Cron Job - -To stop the cron job: - -```sql -SELECT cron.unschedule('pgflow-worker--analyze_website'); -``` - -## Troubleshooting - -1. **Function not found**: Ensure the edge function is deployed -2. **Authentication errors**: Verify your anon key is correct -3. **No tasks processing**: Check that the flow exists and has pending tasks -4. **Worker registration errors**: Ensure the pgflow workers table exists - -## Security Note - -For production, consider storing your anon key as a database setting instead of hardcoding it: - -```sql --- Set the anon key as a database setting -ALTER DATABASE postgres SET app.supabase_anon_key = 'YOUR_ANON_KEY'; - --- Then use this in your cron job -'Authorization', 'Bearer ' || current_setting('app.supabase_anon_key') -``` \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/README.md b/examples/playground/supabase/functions/pgflow-cron-worker/README.md deleted file mode 100644 index 8457b9c59..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/README.md +++ /dev/null @@ -1,167 +0,0 @@ -# pgflow Cron Worker - -This Edge Function implements a cron-based worker architecture for pgflow, designed to be triggered by pg_cron instead of running as a persistent worker. - -## Overview - -The cron-based worker solves the scaling and control issues of persistent Edge Workers by: -- Running on a predictable schedule (every 5 seconds) -- Processing one batch of tasks per invocation -- Terminating cleanly after each run -- Avoiding uncontrolled worker spawning - -## Dependencies - -This function requires: -- `@pgflow/edge-worker@0.3.1` - For internal worker components -- `@pgflow/core@0.3.1` - For PgflowSqlClient -- `@pgflow/dsl@0.3.1` - For flow type definitions -- `postgres@3.4.5` - PostgreSQL client - -Make sure all versions are aligned with what edge-worker expects. - -## Environment Variables - -This function uses the same environment variables as other Edge Workers. The required `EDGE_WORKER_DB_URL` is already configured in `supabase/functions/.env`. - -## How it works - -Unlike the traditional Edge Worker that runs continuously, this function: -- Processes a single batch of tasks per HTTP request -- Is triggered periodically by pg_cron (every 5 seconds) -- Completes and terminates after processing -- Avoids the scaling issues of persistent Edge Workers - -## Quick Start - -### 1. Start the Edge Functions -```bash -npm run start-functions -``` - -### 2. Start the cron job -```bash -cd supabase/functions/pgflow-cron-worker -./start-cron.sh -``` - -This will: -- Get the database URL from `supabase status` -- Create a pg_cron job that runs every 4 seconds -- Start triggering the Edge Function automatically - -### 3. Monitor execution -Check the cron job status and monitor HTTP responses using SQL queries to verify the worker is running correctly. - -### 4. Stop the cron job -```bash -./stop-cron.sh -``` - -## Helper Scripts - -### start-cron.sh -Starts the pg_cron job by: -- Getting the database URL dynamically from `supabase status` -- Running the setup SQL to create the cron schedule -- Configuring the job to call the Edge Function every 4 seconds - -### stop-cron.sh -Stops the pg_cron job by: -- Getting the database URL dynamically -- Unscheduling the cron job - -### test-local.sh -Tests the Edge Function directly without cron: -```bash -./test-local.sh -``` - -### setup-cron-local.sql -SQL script that creates the pg_cron job with: -- Schedule: '4 seconds' (using interval syntax) -- URL: http://host.docker.internal:54321/functions/v1/pgflow-cron-worker -- Payload: Configures flow_slug, batch_size, max_concurrent, and cron_interval_seconds - -## Usage - -### 1. Deploy the function - -```bash -supabase functions deploy pgflow-cron-worker -``` - -### 2.
Set up pg_cron - -Create a cron job in your database to trigger the worker: - -```sql --- Schedule polling every 5 seconds -SELECT cron.schedule( - 'pgflow-analyze-website-worker', - '*/5 * * * * *', -- Every 5 seconds - $$ - SELECT net.http_post( - url := 'https://your-project-ref.supabase.co/functions/v1/pgflow-cron-worker', - headers := jsonb_build_object( - 'Content-Type', 'application/json', - 'Authorization', 'Bearer ' || current_setting('app.supabase_anon_key') - ), - body := jsonb_build_object( - 'flow_slug', 'analyze_website', - 'batch_size', 10, - 'max_concurrent', 5, - 'cron_interval_seconds', 5 - ), - timeout_milliseconds := 25000 -- 25 second timeout - ) as request_id; - $$ -); -``` - -### 3. Test the function - -You can test the function directly: - -```bash -curl -X POST https://your-project-ref.supabase.co/functions/v1/pgflow-cron-worker \ - -H "Authorization: Bearer YOUR_ANON_KEY" \ - -H "Content-Type: application/json" \ - -d '{ - "flow_slug": "analyze_website", - "batch_size": 10, - "max_concurrent": 5, - "cron_interval_seconds": 5 - }' -``` - -## Configuration - -The function requires the following parameters (requests missing any of them are rejected with a 400): - -- `flow_slug` (required): The slug of the flow to process tasks for -- `batch_size` (required): Number of tasks to poll in a single batch -- `max_concurrent` (required): Maximum concurrent task executions -- `cron_interval_seconds` (required): The cron cadence in seconds; the worker keeps processing batches until just before the next invocation - -## Monitoring - -The function returns JSON responses with: -- `status`: "completed" or "error" -- `flow_slug`: The flow that was processed -- `batch_size`: Number of tasks requested -- `max_concurrent`: Concurrency limit used -- `worker_id`: Unique ID of this worker instance -- `duration_ms`: How long the request spent processing -- `iterations`: Number of batches processed -- `error`: Error message (only on failure) - -## Adding more flows - -To add support for more flows, edit `index.ts` and add your flow to the `flows` Map: - -```typescript -import myNewFlow from '../_flows/my_new_flow.ts'; - -const flows = new Map([ - ['analyze_website', analyzeWebsiteFlow], - ['my_new_flow', myNewFlow], // Add your flow here -]); -``` - -Then create a corresponding pg_cron job for the new flow, as sketched below.
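For illustration only (an editorial sketch, not a file from the playground), a companion job for the hypothetical `my_new_flow` could mirror the `analyze_website` example above, changing just the job name and `flow_slug`:

```sql
-- Hypothetical companion job for my_new_flow (job name and tuning values are placeholders)
SELECT cron.schedule(
  'pgflow-my-new-flow-worker',
  '*/5 * * * * *', -- every 5 seconds
  $$
  SELECT net.http_post(
    url := 'https://your-project-ref.supabase.co/functions/v1/pgflow-cron-worker',
    headers := jsonb_build_object(
      'Content-Type', 'application/json',
      'Authorization', 'Bearer ' || current_setting('app.supabase_anon_key')
    ),
    body := jsonb_build_object(
      'flow_slug', 'my_new_flow',
      'batch_size', 10,
      'max_concurrent', 5,
      'cron_interval_seconds', 5
    ),
    timeout_milliseconds := 25000
  ) as request_id;
  $$
);
```

Since the worker polls a queue named after the flow slug, one cron job per flow keeps each flow's batch size and concurrency independently tunable.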
\ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/index.ts b/examples/playground/supabase/functions/pgflow-cron-worker/index.ts deleted file mode 100644 index 8af956b24..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/index.ts +++ /dev/null @@ -1,271 +0,0 @@ -import { serve } from 'https://deno.land/std@0.168.0/http/server.ts'; -import postgres from 'postgres'; -import * as internal from '@pgflow/edge-worker/_internal'; -import { PgflowSqlClient } from '@pgflow/core'; -import type { AnyFlow } from '@pgflow/dsl'; - -// Import flow definitions -import analyzeWebsiteFlow from '../_flows/analyze_website.ts'; - -const flows = new Map([ - ['analyze_website', analyzeWebsiteFlow], - // Add more flows here as needed -]); - -// Create platform adapter to get access to platform resources -const platformAdapter = internal.platform.createAdapter(); - -// Initialize all components outside request handler -const loggingFactory = internal.platform.createLoggingFactory(); -const workerId = crypto.randomUUID(); -loggingFactory.setWorkerId(workerId); - -const sql = postgres(Deno.env.get('EDGE_WORKER_DB_URL')!, { - max: 10, // Reasonable default for cron-based execution - prepare: false, -}); - -// Create abort controller without timeout - can be triggered on demand -const abortController = new AbortController(); - -// Create loggers -const logger = loggingFactory.createLogger('CronWorker'); - -// Create queries instance for worker management -const queries = new internal.core.Queries(sql); - -// Register worker once at startup -const workerRow = await queries.onWorkerStarted({ - queueName: 'analyze_website', // Default queue name - workerId: workerId, - edgeFunctionName: 'pgflow-cron-worker', -}); - -// Track heartbeat timing -let lastHeartbeat = 0; -const heartbeatInterval = 4000; // Send heartbeat every 4 seconds - -// Function to process a batch for a specific flow -async function processBatchForFlow<TFlow extends AnyFlow>( - flowDef: TFlow, - flow_slug: string, - batch_size: number, - max_concurrent: number, -) { - // Create pgflow SQL client with proper type - const pgflowClient = new PgflowSqlClient<TFlow>(sql); - - // Create poller for this flow - const poller = new internal.flow.StepTaskPoller( - pgflowClient, - abortController.signal, - { - batchSize: batch_size, - queueName: flow_slug, - visibilityTimeout: 30, - maxPollSeconds: 5, // Short poll - no need to match cron interval - pollIntervalMs: 100, // Fast polling within the poll window - }, - () => workerId, - loggingFactory.createLogger('StepTaskPoller'), - ); - - // Create executor factory for this flow - const executorFactory = ( - taskWithMessage: any, // StepTaskWithMessage - type not exported through internal API - signal: AbortSignal - ) => { - // Debug logging - logger.info('ExecutorFactory received:', { - hasMessage: !!taskWithMessage?.message, - hasTask: !!taskWithMessage?.task, - hasMsgId: !!taskWithMessage?.msg_id, - keys: taskWithMessage ? 
Object.keys(taskWithMessage) : [], - taskWithMessage: JSON.stringify(taskWithMessage) - }); - - // Build context for StepTaskExecutor - const context = { - // Core platform resources - env: Deno.env.toObject(), - shutdownSignal: abortController.signal, - - // Step task execution context - rawMessage: taskWithMessage.message, - stepTask: taskWithMessage.task, - - // Platform-specific resources (Supabase) - ...platformAdapter.platformResources - }; - - return new internal.flow.StepTaskExecutor( - flowDef, - pgflowClient, - signal, - loggingFactory.createLogger('StepTaskExecutor'), - context - ); - }; - - const executionController = new internal.core.ExecutionController( - executorFactory, - abortController.signal, - { maxConcurrent: max_concurrent }, - loggingFactory.createLogger('ExecutionController'), - ); - - // Create and use BatchProcessor for single batch - const batchProcessor = new internal.core.BatchProcessor( - executionController, - poller, - abortController.signal, - loggingFactory.createLogger('BatchProcessor'), - ); - - // Process one batch - const startTime = Date.now(); - logger.info(`Starting batch processing for flow: ${flow_slug}`); - - await batchProcessor.processBatch(); - - // Wait for completion - await executionController.awaitCompletion(); - - const duration = Date.now() - startTime; - logger.info(`Batch processing completed for flow: ${flow_slug}`, { - duration_ms: duration, - batch_size, - max_concurrent, - }); -} - -// Helper function to send heartbeat -async function sendHeartbeat() { - const now = Date.now(); - if (now - lastHeartbeat >= heartbeatInterval) { - const result = await queries.sendHeartbeat(workerRow); - logger.debug(result.is_deprecated ? 'DEPRECATED' : 'OK'); - lastHeartbeat = now; - - if (result.is_deprecated) { - logger.warn('Worker marked for deprecation'); - // In a cron worker, we might want to handle this differently - // For now, just log the warning - } - } -} - -serve(async (req) => { - try { - // Send heartbeat - await sendHeartbeat(); - - const body = await req.json(); - const { flow_slug, batch_size, max_concurrent, cron_interval_seconds } = - body; - - // Validate required parameters - const missingParams = []; - if (!flow_slug) missingParams.push('flow_slug'); - if (batch_size === undefined || batch_size === null) - missingParams.push('batch_size'); - if (max_concurrent === undefined || max_concurrent === null) - missingParams.push('max_concurrent'); - if (cron_interval_seconds === undefined || cron_interval_seconds === null) - missingParams.push('cron_interval_seconds'); - - if (missingParams.length > 0) { - return new Response( - JSON.stringify({ - error: `Missing required parameters: ${missingParams.join(', ')}`, - required_params: [ - 'flow_slug', - 'batch_size', - 'max_concurrent', - 'cron_interval_seconds', - ], - }), - { - status: 400, - headers: { 'Content-Type': 'application/json' }, - }, - ); - } - - const flow = flows.get(flow_slug); - if (!flow) { - return new Response( - JSON.stringify({ error: `Unknown flow: ${flow_slug}` }), - { - status: 400, - headers: { 'Content-Type': 'application/json' }, - }, - ); - } - - logger.info(`Processing batch for flow: ${flow_slug}`, { - batch_size, - max_concurrent, - cron_interval_seconds, - }); - - const processingStartTime = Date.now(); - const maxExecutionTime = (cron_interval_seconds - 1) * 1000; // Leave 1 second buffer - const maxIterations = cron_interval_seconds * 4; // Safety limit - - let iterations = 0; - let totalProcessed = 0; - - // Keep processing batches until we're 
close to the next cron invocation - while ( - Date.now() - processingStartTime < maxExecutionTime && - iterations < maxIterations - ) { - iterations++; - - logger.info(`Starting batch iteration ${iterations}`); - - // Send heartbeat before each batch - await sendHeartbeat(); - - await processBatchForFlow(flow, flow_slug, batch_size, max_concurrent); - totalProcessed++; - - // Small delay to prevent tight loop if no tasks available - await new Promise((resolve) => setTimeout(resolve, 100)); - } - - const totalDuration = Date.now() - processingStartTime; - - const response = { - status: 'completed', - flow_slug, - batch_size, - max_concurrent, - cron_interval_seconds, - worker_id: workerId, - duration_ms: totalDuration, - iterations: totalProcessed, - timestamp: new Date().toISOString(), - }; - - logger.info('Request completed successfully', response); - - return new Response(JSON.stringify(response), { - headers: { 'Content-Type': 'application/json' }, - }); - } catch (error) { - logger.error('Error processing batch:', error); - return new Response( - JSON.stringify({ - status: 'error', - error: error.message, - worker_id: workerId, - }), - { - status: 500, - headers: { 'Content-Type': 'application/json' }, - }, - ); - } -}); diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron-local.sql b/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron-local.sql deleted file mode 100644 index c6f18bd51..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron-local.sql +++ /dev/null @@ -1,52 +0,0 @@ --- Setup pg_cron job for pgflow cron worker (LOCAL DEVELOPMENT) --- This creates a cron job that triggers the Edge Function every 4 seconds - --- First, ensure pg_cron and pg_net extensions are enabled -CREATE EXTENSION IF NOT EXISTS pg_cron; -CREATE EXTENSION IF NOT EXISTS pg_net; - --- Grant necessary permissions (if not already granted) -GRANT USAGE ON SCHEMA cron TO postgres; -GRANT USAGE ON SCHEMA net TO postgres; - --- Remove existing job if it exists to prevent duplicates -SELECT cron.unschedule(jobname) -FROM cron.job -WHERE jobname = 'pgflow-worker--analyze_website'; - --- Create the cron job for analyze_website flow (LOCAL VERSION) --- This uses localhost URLs for local development -SELECT cron.schedule( - 'pgflow-worker--analyze_website', -- job name - '4 seconds', -- run every 4 seconds using interval syntax - $$ - SELECT net.http_post( - url := 'http://host.docker.internal:54321/functions/v1/pgflow-cron-worker', - headers := jsonb_build_object( - 'Content-Type', 'application/json', - 'Authorization', 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' - ), - body := jsonb_build_object( - 'flow_slug', 'analyze_website', - 'batch_size', 5, - 'max_concurrent', 3, - 'cron_interval_seconds', 4 - ), - timeout_milliseconds := 25000 - ) as request_id; - $$ -); - --- View scheduled jobs -SELECT * FROM cron.job; - --- View recent job runs --- Note: Column names may vary by pg_cron version --- This query attempts to show job execution details - --- To unschedule the job: --- SELECT cron.unschedule('pgflow-worker--analyze_website'); - --- Monitor pg_net requests (useful for debugging) --- Check the net._http_response table for request logs --- SELECT * FROM net._http_response ORDER BY id DESC LIMIT 10; \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron.sql b/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron.sql deleted file mode 100644 index 08912bcdc..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/setup-cron.sql +++ /dev/null @@ -1,92 +0,0 @@ --- Setup pg_cron job for pgflow cron worker --- This creates a cron job that triggers the Edge Function every 4 seconds - --- First, ensure pg_cron and pg_net extensions are enabled -CREATE EXTENSION IF NOT EXISTS pg_cron; -CREATE EXTENSION IF NOT EXISTS pg_net; - --- Grant necessary permissions (if not already granted) -GRANT USAGE ON SCHEMA cron TO postgres; -GRANT USAGE ON SCHEMA net TO postgres; - --- Remove existing job if it exists to prevent duplicates -SELECT cron.unschedule(jobname) -FROM cron.job -WHERE jobname = 'pgflow-worker--analyze_website'; - --- Create the cron job for analyze_website flow --- Replace 'YOUR_PROJECT_REF' with your actual Supabase project reference --- Replace 'YOUR_ANON_KEY' with your actual anon key (or use the setting approach below) -SELECT cron.schedule( - 'pgflow-worker--analyze_website', -- job name - '*/4 * * * * *', -- every 4 seconds - $$ - SELECT net.http_post( - url := 'https://YOUR_PROJECT_REF.supabase.co/functions/v1/pgflow-cron-worker', - headers := jsonb_build_object( - 'Content-Type', 'application/json', - 'Authorization', 'Bearer YOUR_ANON_KEY' - ), - body := jsonb_build_object( - 'flow_slug', 'analyze_website', - 'batch_size', 10, - 'max_concurrent', 5, - 'cron_interval_seconds', 4 - ), - timeout_milliseconds := 25000 - ) as request_id; - $$ -); - --- Alternative: Use database settings for anon key (more secure) --- First set the anon key as a database setting: --- ALTER DATABASE postgres SET app.supabase_anon_key = 'YOUR_ANON_KEY'; - --- Then use this version of the cron job: -/* -SELECT cron.schedule( - 'pgflow-analyze-website-worker', - '5 seconds', -- interval syntax; a seconds-style cron string would close this SQL comment block early - $$ - SELECT net.http_post( - url := 'https://YOUR_PROJECT_REF.supabase.co/functions/v1/pgflow-cron-worker', - headers := jsonb_build_object( - 'Content-Type', 'application/json', - 'Authorization', 'Bearer ' || current_setting('app.supabase_anon_key') - ), - body := jsonb_build_object( - 'flow_slug', 'analyze_website', - 'batch_size', 10, - 'max_concurrent', 5, - 'cron_interval_seconds', 5 - ), - timeout_milliseconds := 25000 - ) as request_id; - $$ -); -*/ - --- View scheduled jobs -SELECT * FROM cron.job; - --- View job run details (useful for debugging) --- cron.job_run_details has no jobname column, so resolve the jobid first -SELECT * FROM cron.job_run_details -WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'pgflow-worker--analyze_website') -ORDER BY start_time DESC -LIMIT 10; - --- To unschedule the job later: --- SELECT cron.unschedule('pgflow-worker--analyze_website'); - --- Monitor pg_net responses (useful for debugging; the response table stores no request URL) -/* -SELECT - created, - status_code, - content, - error_msg -FROM net._http_response -ORDER BY created DESC -LIMIT 10; -*/ \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/start-cron.sh b/examples/playground/supabase/functions/pgflow-cron-worker/start-cron.sh deleted file mode 100755 index 65438260c..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/start-cron.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# Start the pg_cron job for the pgflow cron worker -# This script gets the database URL from supabase status - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -PROJECT_ROOT="$SCRIPT_DIR/../../.." - -# Get database URL from supabase status -echo "Getting database URL from supabase status..." 
-DB_URL=$(cd "$PROJECT_ROOT" && ./scripts/supabase status --output json | jq -r '.DB_URL') - -if [ -z "$DB_URL" ] || [ "$DB_URL" = "null" ]; then - echo "Error: Could not get database URL from supabase status" - echo "Make sure supabase is running: npm run supabase:start" - exit 1 -fi - -echo "Using database URL: $DB_URL" -echo "" -echo "Setting up pg_cron job for pgflow-cron-worker..." -echo "" - -# Run the SQL script (it already handles duplicate prevention) -psql "$DB_URL" -f "$SCRIPT_DIR/setup-cron-local.sql" - -echo "" -echo "Done! The cron job should now be running every 4 seconds." -echo "" -echo "To check the job status, run:" -echo " psql \"$DB_URL\" -c \"SELECT * FROM cron.job;\"" -echo "" -echo "To stop the cron job, run:" -echo " ./stop-cron.sh" \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/stop-cron.sh b/examples/playground/supabase/functions/pgflow-cron-worker/stop-cron.sh deleted file mode 100755 index 7c3a3e034..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/stop-cron.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -# Stop the pg_cron job for the pgflow cron worker - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -PROJECT_ROOT="$SCRIPT_DIR/../../.." - -# Get database URL from supabase status -echo "Getting database URL from supabase status..." -DB_URL=$(cd "$PROJECT_ROOT" && ./scripts/supabase status --output json | jq -r '.DB_URL') - -if [ -z "$DB_URL" ] || [ "$DB_URL" = "null" ]; then - echo "Error: Could not get database URL from supabase status" - echo "Make sure supabase is running: npm run supabase:start" - exit 1 -fi - -echo "Stopping pg_cron job for pgflow-cron-worker..." -echo "" - -# Unschedule the cron job using the exact job name from setup-cron-local.sql -psql "$DB_URL" -c "SELECT cron.unschedule('pgflow-worker--analyze_website');" - -echo "" -echo "Cron job stopped." \ No newline at end of file diff --git a/examples/playground/supabase/functions/pgflow-cron-worker/test-local.sh b/examples/playground/supabase/functions/pgflow-cron-worker/test-local.sh deleted file mode 100755 index 102f2c2af..000000000 --- a/examples/playground/supabase/functions/pgflow-cron-worker/test-local.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# Test the cron worker locally -# Usage: ./test-local.sh - -echo "Testing pgflow-cron-worker locally..." 
- -# Get the function URL (assumes local Supabase is running) -FUNCTION_URL="http://localhost:54321/functions/v1/pgflow-cron-worker" - -# Get the anon key from environment or use default local key -ANON_KEY="${SUPABASE_ANON_KEY:-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0}" - -# Test payload (the worker requires all four parameters) -PAYLOAD='{ - "flow_slug": "analyze_website", - "batch_size": 5, - "max_concurrent": 3, - "cron_interval_seconds": 5 -}' - -echo "Sending request to: $FUNCTION_URL" -echo "Payload: $PAYLOAD" -echo "" - -# Make the request -curl -X POST "$FUNCTION_URL" \ - -H "Authorization: Bearer $ANON_KEY" \ - -H "Content-Type: application/json" \ - -d "$PAYLOAD" \ - -w "\n\nHTTP Status: %{http_code}\nTime: %{time_total}s\n" \ No newline at end of file diff --git a/examples/playground/supabase/functions/utils.ts b/examples/playground/supabase/functions/utils.ts deleted file mode 100644 index 9eda46f8a..000000000 --- a/examples/playground/supabase/functions/utils.ts +++ /dev/null @@ -1,23 +0,0 @@ -// this function sleeps for ms number of milliseconds -export async function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -// this function sleeps for a random number of milliseconds between min and max -export async function randomSleep(min: number, max: number) { - const ms = Math.floor(Math.random() * (max - min + 1) + min); - await sleep(ms); -} - -const FAILURE_URL = 'https://firebase.google.com/'; - -/** - * Simulates a failure for error-handling demos: - * always throws for FAILURE_URL, succeeds for any other value - */ -export async function simulateFailure(url: string): Promise<void> { - // Fail deterministically for the designated failure URL - if (url === FAILURE_URL) { - throw new Error('Simulated failure to demonstrate error handling'); - } -} diff --git a/examples/playground/supabase/migrations/20250429064909_fix_db_reset.sql b/examples/playground/supabase/migrations/20250429064909_fix_db_reset.sql deleted file mode 100644 index 8c2363aed..000000000 --- a/examples/playground/supabase/migrations/20250429064909_fix_db_reset.sql +++ /dev/null @@ -1,31 +0,0 @@ --- This migration fixes any issues that might occur with the pgflow initial migration --- It ensures all required schemas and extensions exist - --- Ensure pgflow schema exists -do $$ -BEGIN - IF NOT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = 'pgflow') THEN - CREATE SCHEMA "pgflow"; - END IF; -END -$$; - --- Ensure pgmq schema exists -do $$ -BEGIN - IF NOT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = 'pgmq') THEN - CREATE SCHEMA "pgmq"; - END IF; -END -$$; - --- Ensure pgmq extension exists -do $$ -BEGIN - IF NOT EXISTS ( - SELECT 1 FROM pg_extension WHERE extname = 'pgmq' - ) THEN - CREATE EXTENSION "pgmq" WITH SCHEMA "pgmq" VERSION "1.4.4"; - END IF; -END -$$; diff --git a/examples/playground/supabase/migrations/20250429164909_pgflow_initial.sql b/examples/playground/supabase/migrations/20250429164909_pgflow_initial.sql deleted file mode 100644 index 3bc6b6837..000000000 --- a/examples/playground/supabase/migrations/20250429164909_pgflow_initial.sql +++ /dev/null @@ -1,766 +0,0 @@ --- These statements are now handled in 20250504000000_fix_pgflow_schema_creation.sql --- to ensure they work correctly with Supabase's database reset behavior --- Add new schema named "pgflow" --- create schema "pgflow"; --- -- Add new schema named "pgmq" --- CREATE SCHEMA "pgmq"; --- -- Create extension "pgmq" --- CREATE EXTENSION "pgmq" WITH 
SCHEMA "pgmq" VERSION "1.4.4"; --- Create "read_with_poll" function -create function "pgflow"."read_with_poll"( - "queue_name" text, - "vt" integer, - "qty" integer, - "max_poll_seconds" integer default 5, - "poll_interval_ms" integer default 100, - "conditional" jsonb default '{}' -) returns setof pgmq.message_record language plpgsql as $$ -DECLARE - r pgmq.message_record; - stop_at TIMESTAMP; - sql TEXT; - qtable TEXT := pgmq.format_table_name(queue_name, 'q'); -BEGIN - stop_at := clock_timestamp() + make_interval(secs => max_poll_seconds); - LOOP - IF (SELECT clock_timestamp() >= stop_at) THEN - RETURN; - END IF; - - sql := FORMAT( - $QUERY$ - WITH cte AS - ( - SELECT msg_id - FROM pgmq.%I - WHERE vt <= clock_timestamp() AND CASE - WHEN %L != '{}'::jsonb THEN (message @> %2$L)::integer - ELSE 1 - END = 1 - ORDER BY msg_id ASC - LIMIT $1 - FOR UPDATE SKIP LOCKED - ) - UPDATE pgmq.%I m - SET - vt = clock_timestamp() + %L, - read_ct = read_ct + 1 - FROM cte - WHERE m.msg_id = cte.msg_id - RETURNING m.msg_id, m.read_ct, m.enqueued_at, m.vt, m.message; - $QUERY$, - qtable, conditional, qtable, make_interval(secs => vt) - ); - - FOR r IN - EXECUTE sql USING qty - LOOP - RETURN NEXT r; - END LOOP; - IF FOUND THEN - RETURN; - ELSE - PERFORM pg_sleep(poll_interval_ms::numeric / 1000); - END IF; - END LOOP; -END; -$$; --- Create composite type "step_task_record" -create type "pgflow"."step_task_record" as ( - "flow_slug" text, "run_id" uuid, "step_slug" text, "input" jsonb, "msg_id" bigint -); --- Create "is_valid_slug" function -create function "pgflow"."is_valid_slug"("slug" text) returns boolean language plpgsql immutable as $$ -begin - return - slug is not null - and slug <> '' - and length(slug) <= 128 - and slug ~ '^[a-zA-Z_][a-zA-Z0-9_]*$' - and slug NOT IN ('run'); -- reserved words -end; -$$; --- Create "flows" table -create table "pgflow"."flows" ( - "flow_slug" text not null, - "opt_max_attempts" integer not null default 3, - "opt_base_delay" integer not null default 1, - "opt_timeout" integer not null default 60, - "created_at" timestamptz not null default now(), - primary key ("flow_slug"), - constraint "opt_base_delay_is_nonnegative" check (opt_base_delay >= 0), - constraint "opt_max_attempts_is_nonnegative" check (opt_max_attempts >= 0), - constraint "opt_timeout_is_positive" check (opt_timeout > 0), - constraint "slug_is_valid" check (pgflow.is_valid_slug(flow_slug)) -); --- Create "steps" table -create table "pgflow"."steps" ( - "flow_slug" text not null, - "step_slug" text not null, - "step_type" text not null default 'single', - "step_index" integer not null default 0, - "deps_count" integer not null default 0, - "opt_max_attempts" integer null, - "opt_base_delay" integer null, - "opt_timeout" integer null, - "created_at" timestamptz not null default now(), - primary key ("flow_slug", "step_slug"), - constraint "steps_flow_slug_step_index_key" unique ("flow_slug", "step_index"), - constraint "steps_flow_slug_fkey" foreign key ("flow_slug") references "pgflow"."flows" ( - "flow_slug" - ) on update no action on delete no action, - constraint "opt_base_delay_is_nonnegative" check ((opt_base_delay is null) or (opt_base_delay >= 0)), - constraint "opt_max_attempts_is_nonnegative" check ((opt_max_attempts is null) or (opt_max_attempts >= 0)), - constraint "opt_timeout_is_positive" check ((opt_timeout is null) or (opt_timeout > 0)), - constraint "steps_deps_count_check" check (deps_count >= 0), - constraint "steps_step_slug_check" check (pgflow.is_valid_slug(step_slug)), - constraint 
"steps_step_type_check" check (step_type = 'single'::text) -); --- Create "deps" table -create table "pgflow"."deps" ( - "flow_slug" text not null, - "dep_slug" text not null, - "step_slug" text not null, - "created_at" timestamptz not null default now(), - primary key ("flow_slug", "dep_slug", "step_slug"), - constraint "deps_flow_slug_dep_slug_fkey" foreign key ("flow_slug", "dep_slug") references "pgflow"."steps" ( - "flow_slug", "step_slug" - ) on update no action on delete no action, - constraint "deps_flow_slug_fkey" foreign key ("flow_slug") references "pgflow"."flows" ( - "flow_slug" - ) on update no action on delete no action, - constraint "deps_flow_slug_step_slug_fkey" foreign key ("flow_slug", "step_slug") references "pgflow"."steps" ( - "flow_slug", "step_slug" - ) on update no action on delete no action, - constraint "deps_check" check (dep_slug <> step_slug) -); --- Create index "idx_deps_by_flow_dep" to table: "deps" -create index "idx_deps_by_flow_dep" on "pgflow"."deps" ("flow_slug", "dep_slug"); --- Create index "idx_deps_by_flow_step" to table: "deps" -create index "idx_deps_by_flow_step" on "pgflow"."deps" ("flow_slug", "step_slug"); --- Create "runs" table -create table "pgflow"."runs" ( - "run_id" uuid not null default gen_random_uuid(), - "flow_slug" text not null, - "status" text not null default 'started', - "input" jsonb not null, - "output" jsonb null, - "remaining_steps" integer not null default 0, - "started_at" timestamptz not null default now(), - "completed_at" timestamptz null, - "failed_at" timestamptz null, - primary key ("run_id"), - constraint "runs_flow_slug_fkey" foreign key ("flow_slug") references "pgflow"."flows" ( - "flow_slug" - ) on update no action on delete no action, - constraint "completed_at_is_after_started_at" check ((completed_at is null) or (completed_at >= started_at)), - constraint "completed_at_or_failed_at" check (not ((completed_at is not null) and (failed_at is not null))), - constraint "failed_at_is_after_started_at" check ((failed_at is null) or (failed_at >= started_at)), - constraint "runs_remaining_steps_check" check (remaining_steps >= 0), - constraint "status_is_valid" check (status = any(array['started'::text, 'failed'::text, 'completed'::text])) -); --- Create index "idx_runs_flow_slug" to table: "runs" -create index "idx_runs_flow_slug" on "pgflow"."runs" ("flow_slug"); --- Create index "idx_runs_status" to table: "runs" -create index "idx_runs_status" on "pgflow"."runs" ("status"); --- Create "step_states" table -create table "pgflow"."step_states" ( - "flow_slug" text not null, - "run_id" uuid not null, - "step_slug" text not null, - "status" text not null default 'created', - "remaining_tasks" integer not null default 1, - "remaining_deps" integer not null default 0, - "created_at" timestamptz not null default now(), - "started_at" timestamptz null, - "completed_at" timestamptz null, - "failed_at" timestamptz null, - primary key ("run_id", "step_slug"), - constraint "step_states_flow_slug_fkey" foreign key ("flow_slug") references "pgflow"."flows" ( - "flow_slug" - ) on update no action on delete no action, - constraint "step_states_flow_slug_step_slug_fkey" foreign key ( - "flow_slug", "step_slug" - ) references "pgflow"."steps" ("flow_slug", "step_slug") on update no action on delete no action, - constraint "step_states_run_id_fkey" foreign key ("run_id") references "pgflow"."runs" ( - "run_id" - ) on update no action on delete no action, - constraint "completed_at_is_after_started_at" check ((completed_at is null) 
or (completed_at >= started_at)), - constraint "completed_at_or_failed_at" check (not ((completed_at is not null) and (failed_at is not null))), - constraint "failed_at_is_after_started_at" check ((failed_at is null) or (failed_at >= started_at)), - constraint "started_at_is_after_created_at" check ((started_at is null) or (started_at >= created_at)), - constraint "status_and_remaining_tasks_match" check ((status <> 'completed'::text) or (remaining_tasks = 0)), - constraint "status_is_valid" check ( - status = any(array['created'::text, 'started'::text, 'completed'::text, 'failed'::text]) - ), - constraint "step_states_remaining_deps_check" check (remaining_deps >= 0), - constraint "step_states_remaining_tasks_check" check (remaining_tasks >= 0) -); --- Create index "idx_step_states_failed" to table: "step_states" -create index "idx_step_states_failed" on "pgflow"."step_states" ("run_id", "step_slug") where (status = 'failed'::text); --- Create index "idx_step_states_flow_slug" to table: "step_states" -create index "idx_step_states_flow_slug" on "pgflow"."step_states" ("flow_slug"); --- Create index "idx_step_states_ready" to table: "step_states" -create index "idx_step_states_ready" on "pgflow"."step_states" ("run_id", "status", "remaining_deps") where ( - (status = 'created'::text) and (remaining_deps = 0) -); --- Create "step_tasks" table -create table "pgflow"."step_tasks" ( - "flow_slug" text not null, - "run_id" uuid not null, - "step_slug" text not null, - "message_id" bigint null, - "task_index" integer not null default 0, - "status" text not null default 'queued', - "attempts_count" integer not null default 0, - "error_message" text null, - "output" jsonb null, - "queued_at" timestamptz not null default now(), - "completed_at" timestamptz null, - "failed_at" timestamptz null, - primary key ("run_id", "step_slug", "task_index"), - constraint "step_tasks_flow_slug_fkey" foreign key ("flow_slug") references "pgflow"."flows" ( - "flow_slug" - ) on update no action on delete no action, - constraint "step_tasks_run_id_fkey" foreign key ("run_id") references "pgflow"."runs" ( - "run_id" - ) on update no action on delete no action, - constraint "step_tasks_run_id_step_slug_fkey" foreign key ("run_id", "step_slug") references "pgflow"."step_states" ( - "run_id", "step_slug" - ) on update no action on delete no action, - constraint "attempts_count_nonnegative" check (attempts_count >= 0), - constraint "completed_at_is_after_queued_at" check ((completed_at is null) or (completed_at >= queued_at)), - constraint "completed_at_or_failed_at" check (not ((completed_at is not null) and (failed_at is not null))), - constraint "failed_at_is_after_queued_at" check ((failed_at is null) or (failed_at >= queued_at)), - constraint "only_single_task_per_step" check (task_index = 0), - constraint "output_valid_only_for_completed" check ((output is null) or (status = 'completed'::text)), - constraint "valid_status" check (status = any(array['queued'::text, 'completed'::text, 'failed'::text])) -); --- Create index "idx_step_tasks_completed" to table: "step_tasks" -create index "idx_step_tasks_completed" on "pgflow"."step_tasks" ("run_id", "step_slug") where ( - status = 'completed'::text -); --- Create index "idx_step_tasks_failed" to table: "step_tasks" -create index "idx_step_tasks_failed" on "pgflow"."step_tasks" ("run_id", "step_slug") where (status = 'failed'::text); --- Create index "idx_step_tasks_flow_run_step" to table: "step_tasks" -create index "idx_step_tasks_flow_run_step" on 
"pgflow"."step_tasks" ("flow_slug", "run_id", "step_slug"); --- Create index "idx_step_tasks_message_id" to table: "step_tasks" -create index "idx_step_tasks_message_id" on "pgflow"."step_tasks" ("message_id"); --- Create index "idx_step_tasks_queued" to table: "step_tasks" -create index "idx_step_tasks_queued" on "pgflow"."step_tasks" ("run_id", "step_slug") where (status = 'queued'::text); --- Create "poll_for_tasks" function -create function "pgflow"."poll_for_tasks"( - "queue_name" text, - "vt" integer, - "qty" integer, - "max_poll_seconds" integer default 5, - "poll_interval_ms" integer default 100 -) returns setof "pgflow"."step_task_record" language sql set "search_path" -= '' as $$ -with read_messages as ( - select * - from pgflow.read_with_poll( - queue_name, - vt, - qty, - max_poll_seconds, - poll_interval_ms - ) -), -tasks as ( - select - task.flow_slug, - task.run_id, - task.step_slug, - task.task_index, - task.message_id - from pgflow.step_tasks as task - join read_messages as message on message.msg_id = task.message_id - where task.message_id = message.msg_id - and task.status = 'queued' -), -increment_attempts as ( - update pgflow.step_tasks - set attempts_count = attempts_count + 1 - from tasks - where step_tasks.message_id = tasks.message_id - and status = 'queued' -), -runs as ( - select - r.run_id, - r.input - from pgflow.runs r - where r.run_id in (select run_id from tasks) -), -deps as ( - select - st.run_id, - st.step_slug, - dep.dep_slug, - dep_task.output as dep_output - from tasks st - join pgflow.deps dep on dep.flow_slug = st.flow_slug and dep.step_slug = st.step_slug - join pgflow.step_tasks dep_task on - dep_task.run_id = st.run_id and - dep_task.step_slug = dep.dep_slug and - dep_task.status = 'completed' -), -deps_outputs as ( - select - d.run_id, - d.step_slug, - jsonb_object_agg(d.dep_slug, d.dep_output) as deps_output - from deps d - group by d.run_id, d.step_slug -), -timeouts as ( - select - task.message_id, - coalesce(step.opt_timeout, flow.opt_timeout) + 2 as vt_delay - from tasks task - join pgflow.flows flow on flow.flow_slug = task.flow_slug - join pgflow.steps step on step.flow_slug = task.flow_slug and step.step_slug = task.step_slug -) - -select - st.flow_slug, - st.run_id, - st.step_slug, - jsonb_build_object('run', r.input) || - coalesce(dep_out.deps_output, '{}'::jsonb) as input, - st.message_id as msg_id -from tasks st -join runs r on st.run_id = r.run_id -left join deps_outputs dep_out on - dep_out.run_id = st.run_id and - dep_out.step_slug = st.step_slug -cross join lateral ( - -- TODO: this is slow because it calls set_vt for each row, and set_vt - -- builds dynamic query from string every time it is called - -- implement set_vt_batch(msgs_ids bigint[], vt_delays int[]) - select pgmq.set_vt(queue_name, st.message_id, - (select t.vt_delay from timeouts t where t.message_id = st.message_id) - ) -) set_vt; -$$; --- Create "add_step" function -create function "pgflow"."add_step"( - "flow_slug" text, - "step_slug" text, - "deps_slugs" text [], - "max_attempts" integer default null::integer, - "base_delay" integer default null::integer, - "timeout" integer default null::integer -) returns "pgflow"."steps" language sql set "search_path" -= '' as $$ -WITH - next_index AS ( - SELECT COALESCE(MAX(step_index) + 1, 0) as idx - FROM pgflow.steps - WHERE flow_slug = add_step.flow_slug - ), - create_step AS ( - INSERT INTO pgflow.steps (flow_slug, step_slug, step_index, deps_count, opt_max_attempts, opt_base_delay, opt_timeout) - SELECT 
add_step.flow_slug, add_step.step_slug, idx, COALESCE(array_length(deps_slugs, 1), 0), max_attempts, base_delay, timeout - FROM next_index - ON CONFLICT (flow_slug, step_slug) - DO UPDATE SET step_slug = pgflow.steps.step_slug - RETURNING * - ), - insert_deps AS ( - INSERT INTO pgflow.deps (flow_slug, dep_slug, step_slug) - SELECT add_step.flow_slug, d.dep_slug, add_step.step_slug - FROM unnest(deps_slugs) AS d(dep_slug) - ON CONFLICT (flow_slug, dep_slug, step_slug) DO NOTHING - RETURNING 1 - ) --- Return the created step -SELECT * FROM create_step; -$$; --- Create "add_step" function -create function "pgflow"."add_step"( - "flow_slug" text, - "step_slug" text, - "max_attempts" integer default null::integer, - "base_delay" integer default null::integer, - "timeout" integer default null::integer -) returns "pgflow"."steps" language sql set "search_path" -= '' as $$ --- Call the original function with an empty array - SELECT * FROM pgflow.add_step(flow_slug, step_slug, ARRAY[]::text[], max_attempts, base_delay, timeout); -$$; --- Create "calculate_retry_delay" function -create function "pgflow"."calculate_retry_delay"( - "base_delay" numeric, "attempts_count" integer -) returns integer language sql immutable parallel safe as $$ select floor(base_delay * power(2, attempts_count))::int $$; --- Create "maybe_complete_run" function -create function "pgflow"."maybe_complete_run"("run_id" uuid) returns void language sql set "search_path" = '' as $$ --- Update run status to completed and set output when there are no remaining steps - -- All done in a single declarative SQL statement - UPDATE pgflow.runs - SET - status = 'completed', - completed_at = now(), - output = ( - -- Get outputs from final steps (steps that are not dependencies for other steps) - SELECT jsonb_object_agg(st.step_slug, st.output) - FROM pgflow.step_tasks st - JOIN pgflow.step_states ss ON ss.run_id = st.run_id AND ss.step_slug = st.step_slug - JOIN pgflow.runs r ON r.run_id = ss.run_id AND r.flow_slug = ss.flow_slug - WHERE st.run_id = maybe_complete_run.run_id - AND st.status = 'completed' - AND NOT EXISTS ( - SELECT 1 - FROM pgflow.deps d - WHERE d.flow_slug = ss.flow_slug - AND d.dep_slug = ss.step_slug - ) - ) - WHERE pgflow.runs.run_id = maybe_complete_run.run_id - AND pgflow.runs.remaining_steps = 0 - AND pgflow.runs.status != 'completed'; -$$; --- Create "start_ready_steps" function -create function "pgflow"."start_ready_steps"("run_id" uuid) returns void language sql set "search_path" = '' as $$ -WITH ready_steps AS ( - SELECT * - FROM pgflow.step_states AS step_state - WHERE step_state.run_id = start_ready_steps.run_id - AND step_state.status = 'created' - AND step_state.remaining_deps = 0 - ORDER BY step_state.step_slug - FOR UPDATE -), -started_step_states AS ( - UPDATE pgflow.step_states - SET status = 'started', - started_at = now() - FROM ready_steps - WHERE pgflow.step_states.run_id = start_ready_steps.run_id - AND pgflow.step_states.step_slug = ready_steps.step_slug - RETURNING pgflow.step_states.* -), -sent_messages AS ( - SELECT - started_step.flow_slug, - started_step.run_id, - started_step.step_slug, - pgmq.send(started_step.flow_slug, jsonb_build_object( - 'flow_slug', started_step.flow_slug, - 'run_id', started_step.run_id, - 'step_slug', started_step.step_slug, - 'task_index', 0 - )) AS msg_id - FROM started_step_states AS started_step -) -INSERT INTO pgflow.step_tasks (flow_slug, run_id, step_slug, message_id) -SELECT - sent_messages.flow_slug, - sent_messages.run_id, - sent_messages.step_slug, - 
sent_messages.msg_id -FROM sent_messages; -$$; --- Create "complete_task" function -create function "pgflow"."complete_task"( - "run_id" uuid, "step_slug" text, "task_index" integer, "output" jsonb -) returns setof "pgflow"."step_tasks" language plpgsql set "search_path" -= '' as $$ -begin - -WITH run_lock AS ( - SELECT * FROM pgflow.runs - WHERE pgflow.runs.run_id = complete_task.run_id - FOR UPDATE -), -step_lock AS ( - SELECT * FROM pgflow.step_states - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = complete_task.step_slug - FOR UPDATE -), -task AS ( - UPDATE pgflow.step_tasks - SET - status = 'completed', - completed_at = now(), - output = complete_task.output - WHERE pgflow.step_tasks.run_id = complete_task.run_id - AND pgflow.step_tasks.step_slug = complete_task.step_slug - AND pgflow.step_tasks.task_index = complete_task.task_index - RETURNING * -), -step_state AS ( - UPDATE pgflow.step_states - SET - status = CASE - WHEN pgflow.step_states.remaining_tasks = 1 THEN 'completed' -- Will be 0 after decrement - ELSE 'started' - END, - completed_at = CASE - WHEN pgflow.step_states.remaining_tasks = 1 THEN now() -- Will be 0 after decrement - ELSE NULL - END, - remaining_tasks = pgflow.step_states.remaining_tasks - 1 - FROM task - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = complete_task.step_slug - RETURNING pgflow.step_states.* -), --- Find all dependent steps if the current step was completed -dependent_steps AS ( - SELECT d.step_slug AS dependent_step_slug - FROM pgflow.deps d - JOIN step_state s ON s.status = 'completed' AND d.flow_slug = s.flow_slug - WHERE d.dep_slug = complete_task.step_slug - ORDER BY d.step_slug -- Ensure consistent ordering -), --- Lock dependent steps before updating -dependent_steps_lock AS ( - SELECT * FROM pgflow.step_states - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug IN (SELECT dependent_step_slug FROM dependent_steps) - FOR UPDATE -), --- Update all dependent steps -dependent_steps_update AS ( - UPDATE pgflow.step_states - SET remaining_deps = pgflow.step_states.remaining_deps - 1 - FROM dependent_steps - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = dependent_steps.dependent_step_slug -) --- Only decrement remaining_steps, don't update status -UPDATE pgflow.runs -SET remaining_steps = pgflow.runs.remaining_steps - 1 -FROM step_state -WHERE pgflow.runs.run_id = complete_task.run_id - AND step_state.status = 'completed'; - -PERFORM pgmq.archive( - queue_name => (SELECT run.flow_slug FROM pgflow.runs AS run WHERE run.run_id = complete_task.run_id), - msg_id => (SELECT message_id FROM pgflow.step_tasks AS step_task - WHERE step_task.run_id = complete_task.run_id - AND step_task.step_slug = complete_task.step_slug - AND step_task.task_index = complete_task.task_index) -); - -PERFORM pgflow.start_ready_steps(complete_task.run_id); - -PERFORM pgflow.maybe_complete_run(complete_task.run_id); - -RETURN QUERY SELECT * -FROM pgflow.step_tasks AS step_task -WHERE step_task.run_id = complete_task.run_id - AND step_task.step_slug = complete_task.step_slug - AND step_task.task_index = complete_task.task_index; - -end; -$$; --- Create "create_flow" function -create function "pgflow"."create_flow"( - "flow_slug" text, "max_attempts" integer default 3, "base_delay" integer default 5, "timeout" integer default 60 -) returns "pgflow"."flows" language sql set "search_path" -= '' as $$ -WITH 
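-  -- Note: the ON CONFLICT clause below performs a deliberate no-op update
-  -- (setting flow_slug to itself) so that RETURNING always yields a row,
-  -- whether the flow was just inserted or already existed; combined with
-  -- ensure_queue, which only calls pgmq.create() when no queue with this
-  -- slug exists yet, this makes create_flow safe to call repeatedly.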
-  flow_upsert AS (
-    INSERT INTO pgflow.flows (flow_slug, opt_max_attempts, opt_base_delay, opt_timeout)
-    VALUES (flow_slug, max_attempts, base_delay, timeout)
-    ON CONFLICT (flow_slug) DO UPDATE
-      SET flow_slug = pgflow.flows.flow_slug -- Dummy update
-    RETURNING *
-  ),
-  ensure_queue AS (
-    SELECT pgmq.create(flow_slug)
-    WHERE NOT EXISTS (
-      SELECT 1 FROM pgmq.list_queues() WHERE queue_name = flow_slug
-    )
-  )
-SELECT f.*
-FROM flow_upsert f
-LEFT JOIN (SELECT 1 FROM ensure_queue) _dummy ON true; -- Left join ensures flow is returned
-$$;
--- Create "fail_task" function
-create function "pgflow"."fail_task"(
-  "run_id" uuid, "step_slug" text, "task_index" integer, "error_message" text
-) returns setof "pgflow"."step_tasks" language plpgsql set "search_path"
-= '' as $$
-begin
-
-WITH run_lock AS (
-  SELECT * FROM pgflow.runs
-  WHERE pgflow.runs.run_id = fail_task.run_id
-  FOR UPDATE
-),
-step_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = fail_task.run_id
-    AND pgflow.step_states.step_slug = fail_task.step_slug
-  FOR UPDATE
-),
-flow_info AS (
-  SELECT r.flow_slug
-  FROM pgflow.runs r
-  WHERE r.run_id = fail_task.run_id
-),
-config AS (
-  SELECT
-    COALESCE(s.opt_max_attempts, f.opt_max_attempts) AS opt_max_attempts,
-    COALESCE(s.opt_base_delay, f.opt_base_delay) AS opt_base_delay
-  FROM pgflow.steps s
-  JOIN pgflow.flows f ON f.flow_slug = s.flow_slug
-  JOIN flow_info fi ON fi.flow_slug = s.flow_slug
-  WHERE s.flow_slug = fi.flow_slug AND s.step_slug = fail_task.step_slug
-),
-
-fail_or_retry_task as (
-  UPDATE pgflow.step_tasks as task
-  SET
-    status = CASE
-      WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN 'queued'
-      ELSE 'failed'
-    END,
-    failed_at = CASE
-      WHEN task.attempts_count >= (SELECT opt_max_attempts FROM config) THEN now()
-      ELSE NULL
-    END,
-    error_message = fail_task.error_message
-  WHERE task.run_id = fail_task.run_id
-    AND task.step_slug = fail_task.step_slug
-    AND task.task_index = fail_task.task_index
-    AND task.status = 'queued'
-  RETURNING *
-),
-maybe_fail_step AS (
-  UPDATE pgflow.step_states
-  SET
-    status = CASE
-      WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN 'failed'
-      ELSE pgflow.step_states.status
-    END,
-    failed_at = CASE
-      WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN now()
-      ELSE NULL
-    END
-  FROM fail_or_retry_task
-  WHERE pgflow.step_states.run_id = fail_task.run_id
-    AND pgflow.step_states.step_slug = fail_task.step_slug
-  RETURNING pgflow.step_states.*
-)
-UPDATE pgflow.runs
-SET status = CASE
-    WHEN (select status from maybe_fail_step) = 'failed' THEN 'failed'
-    ELSE status
-  END,
-  failed_at = CASE
-    WHEN (select status from maybe_fail_step) = 'failed' THEN now()
-    ELSE NULL
-  END
-WHERE pgflow.runs.run_id = fail_task.run_id;
-
--- For queued tasks: delay the message for retry with exponential backoff
-PERFORM (
-  WITH retry_config AS (
-    SELECT
-      COALESCE(s.opt_base_delay, f.opt_base_delay) AS base_delay
-    FROM pgflow.steps s
-    JOIN pgflow.flows f ON f.flow_slug = s.flow_slug
-    JOIN pgflow.runs r ON r.flow_slug = f.flow_slug
-    WHERE r.run_id = fail_task.run_id
-      AND s.step_slug = fail_task.step_slug
-  ),
-  queued_tasks AS (
-    SELECT
-      r.flow_slug,
-      st.message_id,
-      pgflow.calculate_retry_delay((SELECT base_delay FROM retry_config), st.attempts_count) AS calculated_delay
-    FROM pgflow.step_tasks st
-    JOIN pgflow.runs r ON st.run_id = r.run_id
-    WHERE st.run_id = fail_task.run_id
-      AND st.step_slug = fail_task.step_slug
-      AND st.task_index = fail_task.task_index
-      AND st.status = 'queued'
-  )
-  SELECT pgmq.set_vt(qt.flow_slug, qt.message_id, qt.calculated_delay)
-  FROM queued_tasks qt
-  WHERE EXISTS (SELECT 1 FROM queued_tasks)
-);
-
--- For failed tasks: archive the message
-PERFORM (
-  WITH failed_tasks AS (
-    SELECT r.flow_slug, st.message_id
-    FROM pgflow.step_tasks st
-    JOIN pgflow.runs r ON st.run_id = r.run_id
-    WHERE st.run_id = fail_task.run_id
-      AND st.step_slug = fail_task.step_slug
-      AND st.task_index = fail_task.task_index
-      AND st.status = 'failed'
-  )
-  SELECT pgmq.archive(ft.flow_slug, ft.message_id)
-  FROM failed_tasks ft
-  WHERE EXISTS (SELECT 1 FROM failed_tasks)
-);
-
-return query select *
-from pgflow.step_tasks st
-where st.run_id = fail_task.run_id
-  and st.step_slug = fail_task.step_slug
-  and st.task_index = fail_task.task_index;
-
-end;
-$$;
--- Create "start_flow" function
-create function "pgflow"."start_flow"(
-  "flow_slug" text, "input" jsonb
-) returns setof "pgflow"."runs" language plpgsql set "search_path"
-= '' as $$
-declare
-  v_created_run pgflow.runs%ROWTYPE;
-begin
-
-WITH
-  flow_steps AS (
-    SELECT steps.flow_slug, steps.step_slug, steps.deps_count
-    FROM pgflow.steps
-    WHERE steps.flow_slug = start_flow.flow_slug
-  ),
-  created_run AS (
-    INSERT INTO pgflow.runs (flow_slug, input, remaining_steps)
-    VALUES (
-      start_flow.flow_slug,
-      start_flow.input,
-      (SELECT count(*) FROM flow_steps)
-    )
-    RETURNING *
-  ),
-  created_step_states AS (
-    INSERT INTO pgflow.step_states (flow_slug, run_id, step_slug, remaining_deps)
-    SELECT
-      fs.flow_slug,
-      (SELECT run_id FROM created_run),
-      fs.step_slug,
-      fs.deps_count
-    FROM flow_steps fs
-  )
-SELECT * FROM created_run INTO v_created_run;
-
-PERFORM pgflow.start_ready_steps(v_created_run.run_id);
-
-RETURN QUERY SELECT * FROM pgflow.runs where run_id = v_created_run.run_id;
-
-end;
-$$;
--- Create "workers" table
-create table "pgflow"."workers" (
-  "worker_id" uuid not null,
-  "queue_name" text not null,
-  "function_name" text not null,
-  "started_at" timestamptz not null default now(),
-  "stopped_at" timestamptz null,
-  "last_heartbeat_at" timestamptz not null default now(),
-  primary key ("worker_id")
-);
--- Create index "idx_workers_queue_name" to table: "workers"
-create index "idx_workers_queue_name" on "pgflow"."workers" ("queue_name");
diff --git a/examples/playground/supabase/migrations/20250429164919_add_websites.sql b/examples/playground/supabase/migrations/20250429164919_add_websites.sql
deleted file mode 100644
index b85b064ec..000000000
--- a/examples/playground/supabase/migrations/20250429164919_add_websites.sql
+++ /dev/null
@@ -1,44 +0,0 @@
-create table public.websites (
-  id SERIAL primary key,
-  user_id UUID not null references auth.users (id),
-  website_url TEXT not null,
-  summary TEXT not null,
-  tags TEXT [] not null default '{}',
-  created_at TIMESTAMP WITH TIME ZONE default NOW() not null,
-  updated_at TIMESTAMP WITH TIME ZONE default NOW() not null
-);
-
-create index idx_websites_website_url on public.websites (website_url);
-create index idx_websites_user_id on public.websites (user_id);
-
--- Enable Row Level Security
-alter table public.websites enable row level security;
-
--- Policy: Allow all users (including anonymous) to select from websites
-create policy "Allow select for all users"
-on public.websites
-for select
-to public
-using (true);
-
--- Policy: Only owners can update their websites
-create policy "Allow update for owners only"
-on public.websites
-for update
-to authenticated
-using ((select auth.uid()) = user_id)
-with check ((select auth.uid()) = user_id);
-
--- Policy: Only owners can delete their websites
-create policy "Allow delete for owners only"
-on public.websites
-for delete
-to authenticated
-using ((select auth.uid()) = user_id);
-
--- Policy: Ensure user_id is set to the current user on insert
-create policy "Allow insert with user_id set to current user"
-on public.websites
-for insert
-to authenticated
-with check ((select auth.uid()) = user_id);
diff --git a/examples/playground/supabase/migrations/20250429164929_add_start_analyze_website_flow.sql b/examples/playground/supabase/migrations/20250429164929_add_start_analyze_website_flow.sql
deleted file mode 100644
index bfb40cdc3..000000000
--- a/examples/playground/supabase/migrations/20250429164929_add_start_analyze_website_flow.sql
+++ /dev/null
@@ -1,35 +0,0 @@
--- Create a secure wrapper function for pgflow.start_flow
--- This is a security definer function that will run with the privileges of the creator
--- rather than the caller, allowing it to call pgflow.start_flow even if direct access is blocked
-create or replace function public.start_analyze_website_flow(url text)
-returns pgflow.runs
-language plpgsql
-security definer -- Run as the function creator (superuser)
-set search_path = public, pgflow -- Restrict search path for security
-as $$
-DECLARE
-  result_run pgflow.runs;
-BEGIN
-  -- Check if user is authenticated
-  IF auth.uid() IS NULL THEN
-    RAISE EXCEPTION 'User must be authenticated to start a flow';
-  END IF;
-
-  -- Call pgflow.start_flow as the function owner and get a single run record
-  SELECT *
-  INTO result_run
-  FROM pgflow.start_flow(
-    'analyze_website',
-    jsonb_build_object(
-      'url', url,
-      'user_id', auth.uid()
-    )
-  ) LIMIT 1;
-
-  -- Return the single run record
-  RETURN result_run;
-END;
-$$;
-
--- Grant execute permission on the wrapper function to authenticated users
-grant execute on function public.start_analyze_website_flow(text) to authenticated;
diff --git a/examples/playground/supabase/migrations/20250429164939_permissions.sql b/examples/playground/supabase/migrations/20250429164939_permissions.sql
deleted file mode 100644
index 490f3152b..000000000
--- a/examples/playground/supabase/migrations/20250429164939_permissions.sql
+++ /dev/null
@@ -1,57 +0,0 @@
--- Configure permissions to allow authenticated users to access pgflow tables and functions
-
--- Grant usage on the pgflow schema to authenticated users
-grant usage on schema pgflow to authenticated;
-
--- Enable RLS for pgflow tables
-alter table pgflow.flows enable row level security;
-alter table pgflow.steps enable row level security;
-alter table pgflow.deps enable row level security;
-alter table pgflow.runs enable row level security;
-alter table pgflow.step_states enable row level security;
-alter table pgflow.step_tasks enable row level security;
-alter table pgflow.workers enable row level security;
-
--- Create RLS policies for authenticated users to view pgflow tables
-create policy flows_select_policy on pgflow.flows
-for select to authenticated using (true);
-
-create policy steps_select_policy on pgflow.steps
-for select to authenticated using (true);
-
-create policy deps_select_policy on pgflow.deps
-for select to authenticated using (true);
-
-create policy runs_select_policy on pgflow.runs
-for select to authenticated using (true);
-
-create policy step_states_select_policy on pgflow.step_states
-for select to authenticated using (true);
-
-create policy step_tasks_select_policy on pgflow.step_tasks
-for select to authenticated using (true);
-
-create policy workers_select_policy on pgflow.workers
-for select to authenticated using (true);
-
--- Grant select permissions on pgflow tables to authenticated users
-grant select on pgflow.flows to authenticated;
-grant select on pgflow.steps to authenticated;
-grant select on pgflow.deps to authenticated;
-grant select on pgflow.runs to authenticated;
-grant select on pgflow.step_states to authenticated;
-grant select on pgflow.step_tasks to authenticated;
-grant select on pgflow.workers to authenticated;
-
--- REVOKE direct RPC access to pgflow.start_flow from authenticated users
--- This ensures that users can only call the flow through our secure wrapper
-revoke execute on function pgflow.start_flow(text, jsonb) from authenticated;
-
--- Set up realtime subscriptions for pgflow tables
-alter publication supabase_realtime add table pgflow.flows;
-alter publication supabase_realtime add table pgflow.steps;
-alter publication supabase_realtime add table pgflow.deps;
-alter publication supabase_realtime add table pgflow.runs;
-alter publication supabase_realtime add table pgflow.step_states;
-alter publication supabase_realtime add table pgflow.step_tasks;
-alter publication supabase_realtime add table pgflow.workers;
diff --git a/examples/playground/supabase/migrations/20250430140439_create_analyze_website_flow.sql b/examples/playground/supabase/migrations/20250430140439_create_analyze_website_flow.sql
deleted file mode 100644
index a35dbf018..000000000
--- a/examples/playground/supabase/migrations/20250430140439_create_analyze_website_flow.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-SELECT pgflow.create_flow('analyze_website', max_attempts => 3, base_delay => 1, timeout => 4);
-SELECT pgflow.add_step('analyze_website', 'website');
-SELECT pgflow.add_step('analyze_website', 'summary', ARRAY['website']);
-SELECT pgflow.add_step('analyze_website', 'tags', ARRAY['website']);
-SELECT pgflow.add_step('analyze_website', 'saveToDb', ARRAY['summary', 'tags']);
diff --git a/examples/playground/supabase/migrations/20250517125006_20250517072017_pgflow_fix_poll_for_tasks_to_use_separate_statement_for_polling.sql b/examples/playground/supabase/migrations/20250517125006_20250517072017_pgflow_fix_poll_for_tasks_to_use_separate_statement_for_polling.sql
deleted file mode 100644
index 74f0e91c4..000000000
--- a/examples/playground/supabase/migrations/20250517125006_20250517072017_pgflow_fix_poll_for_tasks_to_use_separate_statement_for_polling.sql
+++ /dev/null
@@ -1,101 +0,0 @@
--- Modify "poll_for_tasks" function
-CREATE OR REPLACE FUNCTION "pgflow"."poll_for_tasks" ("queue_name" text, "vt" integer, "qty" integer, "max_poll_seconds" integer DEFAULT 5, "poll_interval_ms" integer DEFAULT 100) RETURNS SETOF "pgflow"."step_task_record" LANGUAGE plpgsql SET "search_path" = '' AS $$
-declare
-  msg_ids bigint[];
-begin
-  -- First statement: Read messages and capture their IDs
-  -- This gets its own snapshot and can see newly committed messages
-  select array_agg(msg_id)
-  into msg_ids
-  from pgflow.read_with_poll(
-    queue_name,
-    vt,
-    qty,
-    max_poll_seconds,
-    poll_interval_ms
-  );
-
-  -- If no messages were read, return empty set
-  if msg_ids is null or array_length(msg_ids, 1) is null then
-    return;
-  end if;
-
-  -- Second statement: Process tasks with fresh snapshot
-  -- This can now see step_tasks that were committed during the poll
-  return query
-  with tasks as (
-    select
-      task.flow_slug,
-      task.run_id,
-      task.step_slug,
-      task.task_index,
-      task.message_id
-    from pgflow.step_tasks as task
-    where task.message_id = any(msg_ids)
-      and task.status = 'queued'
-  ),
-  increment_attempts as (
-    update pgflow.step_tasks
-    set attempts_count = attempts_count + 1
-    from tasks
-    where step_tasks.message_id = tasks.message_id
-      and status = 'queued'
-  ),
-  runs as (
-    select
-      r.run_id,
-      r.input
-    from pgflow.runs r
-    where r.run_id in (select run_id from tasks)
-  ),
-  deps as (
-    select
-      st.run_id,
-      st.step_slug,
-      dep.dep_slug,
-      dep_task.output as dep_output
-    from tasks st
-    join pgflow.deps dep on dep.flow_slug = st.flow_slug and dep.step_slug = st.step_slug
-    join pgflow.step_tasks dep_task on
-      dep_task.run_id = st.run_id and
-      dep_task.step_slug = dep.dep_slug and
-      dep_task.status = 'completed'
-  ),
-  deps_outputs as (
-    select
-      d.run_id,
-      d.step_slug,
-      jsonb_object_agg(d.dep_slug, d.dep_output) as deps_output
-    from deps d
-    group by d.run_id, d.step_slug
-  ),
-  timeouts as (
-    select
-      task.message_id,
-      coalesce(step.opt_timeout, flow.opt_timeout) + 2 as vt_delay
-    from tasks task
-    join pgflow.flows flow on flow.flow_slug = task.flow_slug
-    join pgflow.steps step on step.flow_slug = task.flow_slug and step.step_slug = task.step_slug
-  )
-  select
-    st.flow_slug,
-    st.run_id,
-    st.step_slug,
-    jsonb_build_object('run', r.input) ||
-    coalesce(dep_out.deps_output, '{}'::jsonb) as input,
-    st.message_id as msg_id
-  from tasks st
-  join runs r on st.run_id = r.run_id
-  left join deps_outputs dep_out on
-    dep_out.run_id = st.run_id and
-    dep_out.step_slug = st.step_slug
-  cross join lateral (
-    -- TODO: this is slow because it calls set_vt for each row, and set_vt
-    -- builds a dynamic query from a string every time it is called
-    -- implement set_vt_batch(msg_ids bigint[], vt_delays int[])
-    select pgmq.set_vt(queue_name, st.message_id,
-      (select t.vt_delay from timeouts t where t.message_id = st.message_id)
-    )
-  ) set_vt;
-end;
-$$;
diff --git a/examples/playground/supabase/migrations/20250610080624_20250609105135_pgflow_add_start_tasks_and_started_status.sql b/examples/playground/supabase/migrations/20250610080624_20250609105135_pgflow_add_start_tasks_and_started_status.sql
deleted file mode 100644
index 7f696c8b5..000000000
--- a/examples/playground/supabase/migrations/20250610080624_20250609105135_pgflow_add_start_tasks_and_started_status.sql
+++ /dev/null
@@ -1,371 +0,0 @@
--- Create index "idx_workers_heartbeat" to table: "workers"
-create index "idx_workers_heartbeat" on "pgflow"."workers" ("last_heartbeat_at");
--- Modify "step_tasks" table
-alter table "pgflow"."step_tasks" drop constraint "valid_status",
-add constraint "valid_status" check (
-  status = ANY(array['queued'::text, 'started'::text, 'completed'::text, 'failed'::text])
-),
-add constraint "completed_at_is_after_started_at" check (
-  (completed_at is null) or (started_at is null) or (completed_at >= started_at)
-),
-add constraint "failed_at_is_after_started_at" check (
-  (failed_at is null) or (started_at is null) or (failed_at >= started_at)
-),
-add constraint "started_at_is_after_queued_at" check ((started_at is null) or (started_at >= queued_at)),
-add column "started_at" timestamptz null,
-add column "last_worker_id" uuid null,
-add constraint "step_tasks_last_worker_id_fkey" foreign key ("last_worker_id") references "pgflow"."workers" (
-  "worker_id"
-) on update no action on delete set null;
--- Create index "idx_step_tasks_last_worker" to table: "step_tasks"
-create index "idx_step_tasks_last_worker" on "pgflow"."step_tasks" ("last_worker_id") where (status = 'started'::text);
--- Create index "idx_step_tasks_queued_msg" to table: "step_tasks"
-create index "idx_step_tasks_queued_msg" on "pgflow"."step_tasks" ("message_id") where (status = 'queued'::text);
--- Create index "idx_step_tasks_started" to table: "step_tasks"
-create index "idx_step_tasks_started" on "pgflow"."step_tasks" ("started_at") where (status = 'started'::text);
--- Modify "complete_task" function
-create or replace function "pgflow"."complete_task"(
-  "run_id" uuid, "step_slug" text, "task_index" integer, "output" jsonb
-) returns setof "pgflow"."step_tasks" language plpgsql set "search_path"
-= '' as $$
-begin
-
-WITH run_lock AS (
-  SELECT * FROM pgflow.runs
-  WHERE pgflow.runs.run_id = complete_task.run_id
-  FOR UPDATE
-),
-step_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug = complete_task.step_slug
-  FOR UPDATE
-),
-task AS (
-  UPDATE pgflow.step_tasks
-  SET
-    status = 'completed',
-    completed_at = now(),
-    output = complete_task.output
-  WHERE pgflow.step_tasks.run_id = complete_task.run_id
-    AND pgflow.step_tasks.step_slug = complete_task.step_slug
-    AND pgflow.step_tasks.task_index = complete_task.task_index
-    AND pgflow.step_tasks.status = 'started'
-  RETURNING *
-),
-step_state AS (
-  UPDATE pgflow.step_states
-  SET
-    status = CASE
-      WHEN pgflow.step_states.remaining_tasks = 1 THEN 'completed' -- Will be 0 after decrement
-      ELSE 'started'
-    END,
-    completed_at = CASE
-      WHEN pgflow.step_states.remaining_tasks = 1 THEN now() -- Will be 0 after decrement
-      ELSE NULL
-    END,
-    remaining_tasks = pgflow.step_states.remaining_tasks - 1
-  FROM task
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug = complete_task.step_slug
-  RETURNING pgflow.step_states.*
-),
--- Find all dependent steps if the current step was completed
-dependent_steps AS (
-  SELECT d.step_slug AS dependent_step_slug
-  FROM pgflow.deps d
-  JOIN step_state s ON s.status = 'completed' AND d.flow_slug = s.flow_slug
-  WHERE d.dep_slug = complete_task.step_slug
-  ORDER BY d.step_slug -- Ensure consistent ordering
-),
--- Lock dependent steps before updating
-dependent_steps_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug IN (SELECT dependent_step_slug FROM dependent_steps)
-  FOR UPDATE
-),
--- Update all dependent steps
-dependent_steps_update AS (
-  UPDATE pgflow.step_states
-  SET remaining_deps = pgflow.step_states.remaining_deps - 1
-  FROM dependent_steps
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug = dependent_steps.dependent_step_slug
-)
--- Only decrement remaining_steps, don't update status
-UPDATE pgflow.runs
-SET remaining_steps = pgflow.runs.remaining_steps - 1
-FROM step_state
-WHERE pgflow.runs.run_id = complete_task.run_id
-  AND step_state.status = 'completed';
-
--- For completed tasks: archive the message
-PERFORM (
-  WITH completed_tasks AS (
-    SELECT r.flow_slug, st.message_id
-    FROM pgflow.step_tasks st
-    JOIN pgflow.runs r ON st.run_id = r.run_id
-    WHERE st.run_id = complete_task.run_id
-      AND st.step_slug = complete_task.step_slug
-      AND st.task_index = complete_task.task_index
-      AND st.status = 'completed'
-  )
-  SELECT pgmq.archive(ct.flow_slug, ct.message_id)
-  FROM completed_tasks ct
-  WHERE EXISTS (SELECT 1 FROM completed_tasks)
-);
-
-PERFORM pgflow.start_ready_steps(complete_task.run_id);
-
-PERFORM pgflow.maybe_complete_run(complete_task.run_id);
-
-RETURN QUERY SELECT *
-FROM pgflow.step_tasks AS step_task
-WHERE step_task.run_id = complete_task.run_id
-  AND step_task.step_slug = complete_task.step_slug
-  AND step_task.task_index = complete_task.task_index;
-
-end;
-$$;
--- Modify "fail_task" function
-create or replace function "pgflow"."fail_task"(
-  "run_id" uuid, "step_slug" text, "task_index" integer, "error_message" text
-) returns setof "pgflow"."step_tasks" language plpgsql set "search_path"
-= '' as $$
-begin
-
-WITH run_lock AS (
-  SELECT * FROM pgflow.runs
-  WHERE pgflow.runs.run_id = fail_task.run_id
-  FOR UPDATE
-),
-step_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = fail_task.run_id
-    AND pgflow.step_states.step_slug = fail_task.step_slug
-  FOR UPDATE
-),
-flow_info AS (
-  SELECT r.flow_slug
-  FROM pgflow.runs r
-  WHERE r.run_id = fail_task.run_id
-),
-config AS (
-  SELECT
-    COALESCE(s.opt_max_attempts, f.opt_max_attempts) AS opt_max_attempts,
-    COALESCE(s.opt_base_delay, f.opt_base_delay) AS opt_base_delay
-  FROM pgflow.steps s
-  JOIN pgflow.flows f ON f.flow_slug = s.flow_slug
-  JOIN flow_info fi ON fi.flow_slug = s.flow_slug
-  WHERE s.flow_slug = fi.flow_slug AND s.step_slug = fail_task.step_slug
-),
-
-fail_or_retry_task as (
-  UPDATE pgflow.step_tasks as task
-  SET
-    status = CASE
-      WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN 'queued'
-      ELSE 'failed'
-    END,
-    failed_at = CASE
-      WHEN task.attempts_count >= (SELECT opt_max_attempts FROM config) THEN now()
-      ELSE NULL
-    END,
-    started_at = CASE
-      WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN NULL
-      ELSE task.started_at
-    END,
-    error_message = fail_task.error_message
-  WHERE task.run_id = fail_task.run_id
-    AND task.step_slug = fail_task.step_slug
-    AND task.task_index = fail_task.task_index
-    AND task.status = 'started'
-  RETURNING *
-),
-maybe_fail_step AS (
-  UPDATE pgflow.step_states
-  SET
-    status = CASE
-      WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN 'failed'
-      ELSE pgflow.step_states.status
-    END,
-    failed_at = CASE
-      WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN now()
-      ELSE NULL
-    END
-  FROM fail_or_retry_task
-  WHERE pgflow.step_states.run_id = fail_task.run_id
-    AND pgflow.step_states.step_slug = fail_task.step_slug
-  RETURNING pgflow.step_states.*
-)
-UPDATE pgflow.runs
-SET status = CASE
-    WHEN (select status from maybe_fail_step) = 'failed' THEN 'failed'
-    ELSE status
-  END,
-  failed_at = CASE
-    WHEN (select status from maybe_fail_step) = 'failed' THEN now()
-    ELSE NULL
-  END
-WHERE pgflow.runs.run_id = fail_task.run_id;
-
--- For queued tasks: delay the message for retry with exponential backoff
-PERFORM (
-  WITH retry_config AS (
-    SELECT
-      COALESCE(s.opt_base_delay, f.opt_base_delay) AS base_delay
-    FROM pgflow.steps s
-    JOIN pgflow.flows f ON f.flow_slug = s.flow_slug
-    JOIN pgflow.runs r ON r.flow_slug = f.flow_slug
-    WHERE r.run_id = fail_task.run_id
-      AND s.step_slug = fail_task.step_slug
-  ),
-  queued_tasks AS (
-    SELECT
-      r.flow_slug,
-      st.message_id,
-      pgflow.calculate_retry_delay((SELECT base_delay FROM retry_config), st.attempts_count) AS calculated_delay
-    FROM pgflow.step_tasks st
-    JOIN pgflow.runs r ON st.run_id = r.run_id
-    WHERE st.run_id = fail_task.run_id
-      AND st.step_slug = fail_task.step_slug
-      AND st.task_index = fail_task.task_index
-      AND st.status = 'queued'
-  )
-  SELECT pgmq.set_vt(qt.flow_slug, qt.message_id, qt.calculated_delay)
-  FROM queued_tasks qt
-  WHERE EXISTS (SELECT 1 FROM queued_tasks)
-);
-
--- For failed tasks: archive the message
-PERFORM (
-  WITH failed_tasks AS (
-    SELECT r.flow_slug, st.message_id
-    FROM pgflow.step_tasks st
-    JOIN pgflow.runs r ON st.run_id = r.run_id
-    WHERE st.run_id = fail_task.run_id
-      AND st.step_slug = fail_task.step_slug
-      AND st.task_index = fail_task.task_index
-      AND st.status = 'failed'
-  )
-  SELECT pgmq.archive(ft.flow_slug, ft.message_id)
-  FROM failed_tasks ft
-  WHERE EXISTS (SELECT 1 FROM failed_tasks)
-);
-
-return query select *
-from pgflow.step_tasks st
-where st.run_id = fail_task.run_id
-  and st.step_slug = fail_task.step_slug
-  and st.task_index = fail_task.task_index;
-
-end;
-$$;
--- Modify "poll_for_tasks" function
-create or replace function "pgflow"."poll_for_tasks"(
-  "queue_name" text,
-  "vt" integer,
-  "qty" integer,
-  "max_poll_seconds" integer default 5,
-  "poll_interval_ms" integer default 100
-) returns setof "pgflow"."step_task_record" language plpgsql set "search_path"
-= '' as $$
-begin
-  -- DEPRECATED: This function is deprecated and will be removed in a future version.
-  -- Please update pgflow to use the new two-phase polling approach.
-  -- Run 'npx pgflow install' to update your installation.
-  raise notice 'DEPRECATED: poll_for_tasks is deprecated and will be removed. Please update pgflow via "npx pgflow install".';
-
-  -- Return empty set - no tasks will be processed
-  return;
-end;
-$$;
--- Create "start_tasks" function
-create function "pgflow"."start_tasks"(
-  "flow_slug" text, "msg_ids" bigint [], "worker_id" uuid
-) returns setof "pgflow"."step_task_record" language sql set "search_path"
-= '' as $$
-with tasks as (
-  select
-    task.flow_slug,
-    task.run_id,
-    task.step_slug,
-    task.task_index,
-    task.message_id
-  from pgflow.step_tasks as task
-  where task.flow_slug = start_tasks.flow_slug
-    and task.message_id = any(msg_ids)
-    and task.status = 'queued'
-  ),
-  start_tasks_update as (
-    update pgflow.step_tasks
-    set
-      attempts_count = attempts_count + 1,
-      status = 'started',
-      started_at = now(),
-      last_worker_id = worker_id
-    from tasks
-    where step_tasks.message_id = tasks.message_id
-      and step_tasks.flow_slug = tasks.flow_slug
-      and step_tasks.status = 'queued'
-  ),
-  runs as (
-    select
-      r.run_id,
-      r.input
-    from pgflow.runs r
-    where r.run_id in (select run_id from tasks)
-  ),
-  deps as (
-    select
-      st.run_id,
-      st.step_slug,
-      dep.dep_slug,
-      dep_task.output as dep_output
-    from tasks st
-    join pgflow.deps dep on dep.flow_slug = st.flow_slug and dep.step_slug = st.step_slug
-    join pgflow.step_tasks dep_task on
-      dep_task.run_id = st.run_id and
-      dep_task.step_slug = dep.dep_slug and
-      dep_task.status = 'completed'
-  ),
-  deps_outputs as (
-    select
-      d.run_id,
-      d.step_slug,
-      jsonb_object_agg(d.dep_slug, d.dep_output) as deps_output
-    from deps d
-    group by d.run_id, d.step_slug
-  ),
-  timeouts as (
-    select
-      task.message_id,
-      task.flow_slug,
-      coalesce(step.opt_timeout, flow.opt_timeout) + 2 as vt_delay
-    from tasks task
-    join pgflow.flows flow on flow.flow_slug = task.flow_slug
-    join pgflow.steps step on step.flow_slug = task.flow_slug and step.step_slug = task.step_slug
-  )
-  select
-    st.flow_slug,
-    st.run_id,
-    st.step_slug,
-    jsonb_build_object('run', r.input) ||
-    coalesce(dep_out.deps_output, '{}'::jsonb) as input,
-    st.message_id as msg_id
-  from tasks st
-  join runs r on st.run_id = r.run_id
-  left join deps_outputs dep_out on
-    dep_out.run_id = st.run_id and
-    dep_out.step_slug = st.step_slug
-  cross join lateral (
-    -- TODO: this is slow because it calls set_vt for each row, and set_vt
-    -- builds a dynamic query from a string every time it is called
-    -- implement set_vt_batch(msg_ids bigint[], vt_delays int[])
-    select pgmq.set_vt(t.flow_slug, st.message_id, t.vt_delay)
-    from timeouts t
-    where t.message_id = st.message_id
-      and t.flow_slug = st.flow_slug
-  ) set_vt
-$$;
diff --git a/examples/playground/supabase/migrations/20250610180554_pgflow_add_set_vt_batch_and_use_it_in_start_tasks.sql b/examples/playground/supabase/migrations/20250610180554_pgflow_add_set_vt_batch_and_use_it_in_start_tasks.sql
deleted file mode 100644
index 1e8585a5a..000000000
--- a/examples/playground/supabase/migrations/20250610180554_pgflow_add_set_vt_batch_and_use_it_in_start_tasks.sql
+++ /dev/null
@@ -1,127 +0,0 @@
--- Create "set_vt_batch" function
-CREATE FUNCTION "pgflow"."set_vt_batch" ("queue_name" text, "msg_ids" bigint[], "vt_offsets" integer[]) RETURNS SETOF pgmq.message_record LANGUAGE plpgsql AS $$
-DECLARE
-  qtable TEXT := pgmq.format_table_name(queue_name, 'q');
-  sql TEXT;
-BEGIN
-  /* ---------- safety checks ---------------------------------------------------- */
-  IF msg_ids IS NULL OR vt_offsets IS NULL OR array_length(msg_ids, 1) = 0 THEN
-    RETURN; -- nothing to do, return empty set
-  END IF;
-
-  IF array_length(msg_ids, 1) IS DISTINCT FROM array_length(vt_offsets, 1) THEN
-    RAISE EXCEPTION
-      'msg_ids length (%) must equal vt_offsets length (%)',
-      array_length(msg_ids, 1), array_length(vt_offsets, 1);
-  END IF;
-
-  /* ---------- dynamic statement ------------------------------------------------ */
-  /* One UPDATE joins with the unnested arrays */
-  sql := format(
-    $FMT$
-    WITH input (msg_id, vt_offset) AS (
-      SELECT unnest($1)::bigint
-           , unnest($2)::int
-    )
-    UPDATE pgmq.%I q
-    SET vt = clock_timestamp() + make_interval(secs => input.vt_offset),
-        read_ct = read_ct -- no change, but keeps RETURNING list aligned
-    FROM input
-    WHERE q.msg_id = input.msg_id
-    RETURNING q.msg_id,
-              q.read_ct,
-              q.enqueued_at,
-              q.vt,
-              q.message
-    $FMT$,
-    qtable
-  );
-
-  RETURN QUERY EXECUTE sql USING msg_ids, vt_offsets;
-END;
-$$;
--- Modify "start_tasks" function
-CREATE OR REPLACE FUNCTION "pgflow"."start_tasks" ("flow_slug" text, "msg_ids" bigint[], "worker_id" uuid) RETURNS SETOF "pgflow"."step_task_record" LANGUAGE sql SET "search_path" = '' AS $$
-with tasks as (
-  select
-    task.flow_slug,
-    task.run_id,
-    task.step_slug,
-    task.task_index,
-    task.message_id
-  from pgflow.step_tasks as task
-  where task.flow_slug = start_tasks.flow_slug
-    and task.message_id = any(msg_ids)
-    and task.status = 'queued'
-  ),
-  start_tasks_update as (
-    update pgflow.step_tasks
-    set
-      attempts_count = attempts_count + 1,
-      status = 'started',
-      started_at = now(),
-      last_worker_id = worker_id
-    from tasks
-    where step_tasks.message_id = tasks.message_id
-      and step_tasks.flow_slug = tasks.flow_slug
-      and step_tasks.status = 'queued'
-  ),
-  runs as (
-    select
-      r.run_id,
-      r.input
-    from pgflow.runs r
-    where r.run_id in (select run_id from tasks)
-  ),
-  deps as (
-    select
-      st.run_id,
-      st.step_slug,
-      dep.dep_slug,
-      dep_task.output as dep_output
-    from tasks st
-    join pgflow.deps dep on dep.flow_slug = st.flow_slug and dep.step_slug = st.step_slug
-    join pgflow.step_tasks dep_task on
-      dep_task.run_id = st.run_id and
-      dep_task.step_slug = dep.dep_slug and
-      dep_task.status = 'completed'
-  ),
-  deps_outputs as (
-    select
-      d.run_id,
-      d.step_slug,
-      jsonb_object_agg(d.dep_slug, d.dep_output) as deps_output
-    from deps d
-    group by d.run_id, d.step_slug
-  ),
-  timeouts as (
-    select
-      task.message_id,
-      task.flow_slug,
-      coalesce(step.opt_timeout, flow.opt_timeout) + 2 as vt_delay
-    from tasks task
-    join pgflow.flows flow on flow.flow_slug = task.flow_slug
-    join pgflow.steps step on step.flow_slug = task.flow_slug and step.step_slug = task.step_slug
-  ),
-  -- Batch update visibility timeouts for all messages
-  set_vt_batch as (
-    select pgflow.set_vt_batch(
-      start_tasks.flow_slug,
-      array_agg(t.message_id order by t.message_id),
-      array_agg(t.vt_delay order by t.message_id)
-    )
-    from timeouts t
-  )
-  select
-    st.flow_slug,
-    st.run_id,
-    st.step_slug,
-    jsonb_build_object('run', r.input) ||
-    coalesce(dep_out.deps_output, '{}'::jsonb) as input,
-    st.message_id as msg_id
-  from tasks st
-  join runs r on st.run_id = r.run_id
-  left join deps_outputs dep_out on
-    dep_out.run_id = st.run_id and
-    dep_out.step_slug = st.step_slug
-$$;
diff --git a/examples/playground/supabase/migrations/20250615172434_20250614124241_pgflow_add_realtime.sql b/examples/playground/supabase/migrations/20250615172434_20250614124241_pgflow_add_realtime.sql
deleted file mode 100644
index 353ade1ca..000000000
--- a/examples/playground/supabase/migrations/20250615172434_20250614124241_pgflow_add_realtime.sql
+++ /dev/null
@@ -1,501 +0,0 @@
--- Modify "step_states" table
-ALTER TABLE "pgflow"."step_states" ADD COLUMN "error_message" text NULL;
--- Create index "idx_step_states_run_id" to table: "step_states"
-CREATE INDEX "idx_step_states_run_id" ON "pgflow"."step_states" ("run_id");
--- Modify "maybe_complete_run" function
-CREATE OR REPLACE FUNCTION "pgflow"."maybe_complete_run" ("run_id" uuid) RETURNS void LANGUAGE plpgsql SET "search_path" = '' AS $$
-declare
-  v_completed_run pgflow.runs%ROWTYPE;
-begin
-  -- Update run status to completed and set output when there are no remaining steps
-  WITH run_output AS (
-    -- Get outputs from final steps (steps that are not dependencies for other steps)
-    SELECT jsonb_object_agg(st.step_slug, st.output) as final_output
-    FROM pgflow.step_tasks st
-    JOIN pgflow.step_states ss ON ss.run_id = st.run_id AND ss.step_slug = st.step_slug
-    JOIN pgflow.runs r ON r.run_id = ss.run_id AND r.flow_slug = ss.flow_slug
-    WHERE st.run_id = maybe_complete_run.run_id
-      AND st.status = 'completed'
-      AND NOT EXISTS (
-        SELECT 1
-        FROM pgflow.deps d
-        WHERE d.flow_slug = ss.flow_slug
-          AND d.dep_slug = ss.step_slug
-      )
-  )
-  UPDATE pgflow.runs
-  SET
-    status = 'completed',
-    completed_at = now(),
-    output = (SELECT final_output FROM run_output)
-  WHERE pgflow.runs.run_id = maybe_complete_run.run_id
-    AND pgflow.runs.remaining_steps = 0
-    AND pgflow.runs.status != 'completed'
-  RETURNING * INTO v_completed_run;
-
-  -- Only send broadcast if run was completed
-  IF v_completed_run.run_id IS NOT NULL THEN
-    PERFORM realtime.send(
-      jsonb_build_object(
-        'event_type', 'run:completed',
-        'run_id', v_completed_run.run_id,
-        'flow_slug', v_completed_run.flow_slug,
-        'status', 'completed',
-        'output', v_completed_run.output,
-        'completed_at', v_completed_run.completed_at
-      ),
-      'run:completed',
-      concat('pgflow:run:', v_completed_run.run_id),
-      false
-    );
-  END IF;
-end;
-$$;
--- Modify "start_ready_steps" function
-CREATE OR REPLACE FUNCTION "pgflow"."start_ready_steps" ("run_id" uuid) RETURNS void LANGUAGE sql SET "search_path" = '' AS $$
-WITH ready_steps AS (
-  SELECT *
-  FROM pgflow.step_states AS step_state
-  WHERE step_state.run_id = start_ready_steps.run_id
-    AND step_state.status = 'created'
-    AND step_state.remaining_deps = 0
-  ORDER BY step_state.step_slug
-  FOR UPDATE
-),
-started_step_states AS (
-  UPDATE pgflow.step_states
-  SET status = 'started',
-      started_at = now()
-  FROM ready_steps
-  WHERE pgflow.step_states.run_id = start_ready_steps.run_id
-    AND pgflow.step_states.step_slug = ready_steps.step_slug
-  RETURNING pgflow.step_states.*
-),
-sent_messages AS (
-  SELECT
-    started_step.flow_slug,
-    started_step.run_id,
-    started_step.step_slug,
-    pgmq.send(started_step.flow_slug, jsonb_build_object(
-      'flow_slug', started_step.flow_slug,
-      'run_id', started_step.run_id,
-      'step_slug', started_step.step_slug,
-      'task_index', 0
-    )) AS msg_id
-  FROM started_step_states AS started_step
-),
-broadcast_events AS (
-  SELECT
-    realtime.send(
-      jsonb_build_object(
-        'event_type', 'step:started',
-        'run_id', started_step.run_id,
-        'step_slug', started_step.step_slug,
-        'status', 'started',
-        'started_at', started_step.started_at,
-        'remaining_tasks', 1,
-        'remaining_deps', started_step.remaining_deps
-      ),
-      concat('step:', started_step.step_slug, ':started'),
-      concat('pgflow:run:', started_step.run_id),
-      false
-    )
-  FROM started_step_states AS started_step
-)
-INSERT INTO pgflow.step_tasks (flow_slug, run_id, step_slug, message_id)
-SELECT
-  sent_messages.flow_slug,
-  sent_messages.run_id,
-  sent_messages.step_slug,
-  sent_messages.msg_id
-FROM sent_messages;
-$$;
--- Modify "complete_task" function
-CREATE OR REPLACE FUNCTION "pgflow"."complete_task" ("run_id" uuid, "step_slug" text, "task_index" integer, "output" jsonb) RETURNS SETOF "pgflow"."step_tasks" LANGUAGE plpgsql SET "search_path" = '' AS $$
-declare
-  v_step_state pgflow.step_states%ROWTYPE;
-begin
-
-WITH run_lock AS (
-  SELECT * FROM pgflow.runs
-  WHERE pgflow.runs.run_id = complete_task.run_id
-  FOR UPDATE
-),
-step_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug = complete_task.step_slug
-  FOR UPDATE
-),
-task AS (
-  UPDATE pgflow.step_tasks
-  SET
-    status = 'completed',
-    completed_at = now(),
-    output = complete_task.output
-  WHERE pgflow.step_tasks.run_id = complete_task.run_id
-    AND pgflow.step_tasks.step_slug = complete_task.step_slug
-    AND pgflow.step_tasks.task_index = complete_task.task_index
-    AND pgflow.step_tasks.status = 'started'
-  RETURNING *
-),
-step_state AS (
-  UPDATE pgflow.step_states
-  SET
-    status = CASE
-      WHEN pgflow.step_states.remaining_tasks = 1 THEN 'completed' -- Will be 0 after decrement
-      ELSE 'started'
-    END,
-    completed_at = CASE
-      WHEN pgflow.step_states.remaining_tasks = 1 THEN now() -- Will be 0 after decrement
-      ELSE NULL
-    END,
-    remaining_tasks = pgflow.step_states.remaining_tasks - 1
-  FROM task
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug = complete_task.step_slug
-  RETURNING pgflow.step_states.*
-),
--- Find all dependent steps if the current step was completed
-dependent_steps AS (
-  SELECT d.step_slug AS dependent_step_slug
-  FROM pgflow.deps d
-  JOIN step_state s ON s.status = 'completed' AND d.flow_slug = s.flow_slug
-  WHERE d.dep_slug = complete_task.step_slug
-  ORDER BY d.step_slug -- Ensure consistent ordering
-),
--- Lock dependent steps before updating
-dependent_steps_lock AS (
-  SELECT * FROM pgflow.step_states
-  WHERE pgflow.step_states.run_id = complete_task.run_id
-    AND pgflow.step_states.step_slug IN (SELECT dependent_step_slug FROM dependent_steps)
-  FOR UPDATE
-),
--- Update all dependent steps
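--- (decrementing remaining_deps below is what later allows start_ready_steps,
--- invoked further down in this function, to pick up steps whose dependency
--- counter has reached zero)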
-dependent_steps_update AS ( - UPDATE pgflow.step_states - SET remaining_deps = pgflow.step_states.remaining_deps - 1 - FROM dependent_steps - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = dependent_steps.dependent_step_slug -) --- Only decrement remaining_steps, don't update status -UPDATE pgflow.runs -SET remaining_steps = pgflow.runs.remaining_steps - 1 -FROM step_state -WHERE pgflow.runs.run_id = complete_task.run_id - AND step_state.status = 'completed'; - --- Get the updated step state for broadcasting -SELECT * INTO v_step_state FROM pgflow.step_states -WHERE pgflow.step_states.run_id = complete_task.run_id AND pgflow.step_states.step_slug = complete_task.step_slug; - --- Send broadcast event for step completed if the step is completed -IF v_step_state.status = 'completed' THEN - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'step:completed', - 'run_id', complete_task.run_id, - 'step_slug', complete_task.step_slug, - 'status', 'completed', - 'output', complete_task.output, - 'completed_at', v_step_state.completed_at - ), - concat('step:', complete_task.step_slug, ':completed'), - concat('pgflow:run:', complete_task.run_id), - false - ); -END IF; - --- For completed tasks: archive the message -PERFORM ( - WITH completed_tasks AS ( - SELECT r.flow_slug, st.message_id - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = complete_task.run_id - AND st.step_slug = complete_task.step_slug - AND st.task_index = complete_task.task_index - AND st.status = 'completed' - ) - SELECT pgmq.archive(ct.flow_slug, ct.message_id) - FROM completed_tasks ct - WHERE EXISTS (SELECT 1 FROM completed_tasks) -); - -PERFORM pgflow.start_ready_steps(complete_task.run_id); - -PERFORM pgflow.maybe_complete_run(complete_task.run_id); - -RETURN QUERY SELECT * -FROM pgflow.step_tasks AS step_task -WHERE step_task.run_id = complete_task.run_id - AND step_task.step_slug = complete_task.step_slug - AND step_task.task_index = complete_task.task_index; - -end; -$$; --- Modify "fail_task" function -CREATE OR REPLACE FUNCTION "pgflow"."fail_task" ("run_id" uuid, "step_slug" text, "task_index" integer, "error_message" text) RETURNS SETOF "pgflow"."step_tasks" LANGUAGE plpgsql SET "search_path" = '' AS $$ -DECLARE - v_run_failed boolean; -begin - -WITH run_lock AS ( - SELECT * FROM pgflow.runs - WHERE pgflow.runs.run_id = fail_task.run_id - FOR UPDATE -), -step_lock AS ( - SELECT * FROM pgflow.step_states - WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - FOR UPDATE -), -flow_info AS ( - SELECT r.flow_slug - FROM pgflow.runs r - WHERE r.run_id = fail_task.run_id -), -config AS ( - SELECT - COALESCE(s.opt_max_attempts, f.opt_max_attempts) AS opt_max_attempts, - COALESCE(s.opt_base_delay, f.opt_base_delay) AS opt_base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN flow_info fi ON fi.flow_slug = s.flow_slug - WHERE s.flow_slug = fi.flow_slug AND s.step_slug = fail_task.step_slug -), -fail_or_retry_task as ( - UPDATE pgflow.step_tasks as task - SET - status = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN 'queued' - ELSE 'failed' - END, - failed_at = CASE - WHEN task.attempts_count >= (SELECT opt_max_attempts FROM config) THEN now() - ELSE NULL - END, - started_at = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN NULL - ELSE task.started_at - END, - error_message = 
fail_task.error_message - WHERE task.run_id = fail_task.run_id - AND task.step_slug = fail_task.step_slug - AND task.task_index = fail_task.task_index - AND task.status = 'started' - RETURNING * -), -maybe_fail_step AS ( - UPDATE pgflow.step_states - SET - status = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN 'failed' - ELSE pgflow.step_states.status - END, - failed_at = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN now() - ELSE NULL - END, - error_message = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN fail_task.error_message - ELSE NULL - END - FROM fail_or_retry_task - WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - RETURNING pgflow.step_states.* -), --- Send broadcast event for step failed if necessary -broadcast_step_failed AS ( - SELECT - realtime.send( - jsonb_build_object( - 'event_type', 'step:failed', - 'run_id', fail_task.run_id, - 'step_slug', fail_task.step_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - concat('step:', fail_task.step_slug, ':failed'), - concat('pgflow:run:', fail_task.run_id), - false - ) - FROM maybe_fail_step - WHERE maybe_fail_step.status = 'failed' -) --- Only decrement remaining_steps, don't update status -UPDATE pgflow.runs -SET status = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN 'failed' - ELSE status - END, - failed_at = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN now() - ELSE NULL - END -WHERE pgflow.runs.run_id = fail_task.run_id -RETURNING (status = 'failed') INTO v_run_failed; - --- Send broadcast event for run failure if the run was failed -IF v_run_failed THEN - DECLARE - v_flow_slug text; - BEGIN - SELECT flow_slug INTO v_flow_slug FROM pgflow.runs WHERE pgflow.runs.run_id = fail_task.run_id; - - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'run:failed', - 'run_id', fail_task.run_id, - 'flow_slug', v_flow_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - 'run:failed', - concat('pgflow:run:', fail_task.run_id), - false - ); - END; -END IF; - --- For queued tasks: delay the message for retry with exponential backoff -PERFORM ( - WITH retry_config AS ( - SELECT - COALESCE(s.opt_base_delay, f.opt_base_delay) AS base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN pgflow.runs r ON r.flow_slug = f.flow_slug - WHERE r.run_id = fail_task.run_id - AND s.step_slug = fail_task.step_slug - ), - queued_tasks AS ( - SELECT - r.flow_slug, - st.message_id, - pgflow.calculate_retry_delay((SELECT base_delay FROM retry_config), st.attempts_count) AS calculated_delay - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'queued' - ) - SELECT pgmq.set_vt(qt.flow_slug, qt.message_id, qt.calculated_delay) - FROM queued_tasks qt - WHERE EXISTS (SELECT 1 FROM queued_tasks) -); - --- For failed tasks: archive the message -PERFORM ( - WITH failed_tasks AS ( - SELECT r.flow_slug, st.message_id - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'failed' - ) - SELECT 
pgmq.archive(ft.flow_slug, ft.message_id) - FROM failed_tasks ft - WHERE EXISTS (SELECT 1 FROM failed_tasks) -); - -return query select * -from pgflow.step_tasks st -where st.run_id = fail_task.run_id - and st.step_slug = fail_task.step_slug - and st.task_index = fail_task.task_index; - -end; -$$; --- Create "get_run_with_states" function -CREATE FUNCTION "pgflow"."get_run_with_states" ("run_id" uuid) RETURNS jsonb LANGUAGE sql SECURITY DEFINER AS $$ -SELECT jsonb_build_object( - 'run', to_jsonb(r), - 'steps', COALESCE(jsonb_agg(to_jsonb(s)) FILTER (WHERE s.run_id IS NOT NULL), '[]'::jsonb) - ) - FROM pgflow.runs r - LEFT JOIN pgflow.step_states s ON s.run_id = r.run_id - WHERE r.run_id = get_run_with_states.run_id - GROUP BY r.run_id; -$$; --- Create "start_flow" function -CREATE FUNCTION "pgflow"."start_flow" ("flow_slug" text, "input" jsonb, "run_id" uuid DEFAULT NULL::uuid) RETURNS SETOF "pgflow"."runs" LANGUAGE plpgsql SET "search_path" = '' AS $$ -declare - v_created_run pgflow.runs%ROWTYPE; -begin - -WITH - flow_steps AS ( - SELECT steps.flow_slug, steps.step_slug, steps.deps_count - FROM pgflow.steps - WHERE steps.flow_slug = start_flow.flow_slug - ), - created_run AS ( - INSERT INTO pgflow.runs (run_id, flow_slug, input, remaining_steps) - VALUES ( - COALESCE(start_flow.run_id, gen_random_uuid()), - start_flow.flow_slug, - start_flow.input, - (SELECT count(*) FROM flow_steps) - ) - RETURNING * - ), - created_step_states AS ( - INSERT INTO pgflow.step_states (flow_slug, run_id, step_slug, remaining_deps) - SELECT - fs.flow_slug, - (SELECT created_run.run_id FROM created_run), - fs.step_slug, - fs.deps_count - FROM flow_steps fs - ) -SELECT * FROM created_run INTO v_created_run; - --- Send broadcast event for run started -PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'run:started', - 'run_id', v_created_run.run_id, - 'flow_slug', v_created_run.flow_slug, - 'input', v_created_run.input, - 'status', 'started', - 'remaining_steps', v_created_run.remaining_steps, - 'started_at', v_created_run.started_at - ), - 'run:started', - concat('pgflow:run:', v_created_run.run_id), - false -); - -PERFORM pgflow.start_ready_steps(v_created_run.run_id); - -RETURN QUERY SELECT * FROM pgflow.runs where pgflow.runs.run_id = v_created_run.run_id; - -end; -$$; --- Create "start_flow_with_states" function -CREATE FUNCTION "pgflow"."start_flow_with_states" ("flow_slug" text, "input" jsonb, "run_id" uuid DEFAULT NULL::uuid) RETURNS jsonb LANGUAGE plpgsql SECURITY DEFINER AS $$ -DECLARE - v_run_id UUID; -BEGIN - -- Start the flow using existing function - SELECT r.run_id INTO v_run_id FROM pgflow.start_flow( - start_flow_with_states.flow_slug, - start_flow_with_states.input, - start_flow_with_states.run_id - ) AS r LIMIT 1; - - -- Use get_run_with_states to return the complete state - RETURN pgflow.get_run_with_states(v_run_id); -END; -$$; --- Drop "start_flow" function -DROP FUNCTION "pgflow"."start_flow" (text, jsonb); diff --git a/examples/playground/supabase/migrations/20250627215516_20250619195327_pgflow_fix_fail_task_missing_realtime_event.sql b/examples/playground/supabase/migrations/20250627215516_20250619195327_pgflow_fix_fail_task_missing_realtime_event.sql deleted file mode 100644 index 9782f5393..000000000 --- a/examples/playground/supabase/migrations/20250627215516_20250619195327_pgflow_fix_fail_task_missing_realtime_event.sql +++ /dev/null @@ -1,185 +0,0 @@ --- Modify "fail_task" function -CREATE OR REPLACE FUNCTION "pgflow"."fail_task" ("run_id" uuid, "step_slug" text, 
"task_index" integer, "error_message" text) RETURNS SETOF "pgflow"."step_tasks" LANGUAGE plpgsql SET "search_path" = '' AS $$ -DECLARE - v_run_failed boolean; - v_step_failed boolean; -begin - -WITH run_lock AS ( - SELECT * FROM pgflow.runs - WHERE pgflow.runs.run_id = fail_task.run_id - FOR UPDATE -), -step_lock AS ( - SELECT * FROM pgflow.step_states - WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - FOR UPDATE -), -flow_info AS ( - SELECT r.flow_slug - FROM pgflow.runs r - WHERE r.run_id = fail_task.run_id -), -config AS ( - SELECT - COALESCE(s.opt_max_attempts, f.opt_max_attempts) AS opt_max_attempts, - COALESCE(s.opt_base_delay, f.opt_base_delay) AS opt_base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN flow_info fi ON fi.flow_slug = s.flow_slug - WHERE s.flow_slug = fi.flow_slug AND s.step_slug = fail_task.step_slug -), -fail_or_retry_task as ( - UPDATE pgflow.step_tasks as task - SET - status = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN 'queued' - ELSE 'failed' - END, - failed_at = CASE - WHEN task.attempts_count >= (SELECT opt_max_attempts FROM config) THEN now() - ELSE NULL - END, - started_at = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN NULL - ELSE task.started_at - END, - error_message = fail_task.error_message - WHERE task.run_id = fail_task.run_id - AND task.step_slug = fail_task.step_slug - AND task.task_index = fail_task.task_index - AND task.status = 'started' - RETURNING * -), -maybe_fail_step AS ( - UPDATE pgflow.step_states - SET - status = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN 'failed' - ELSE pgflow.step_states.status - END, - failed_at = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN now() - ELSE NULL - END, - error_message = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN fail_task.error_message - ELSE NULL - END - FROM fail_or_retry_task - WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - RETURNING pgflow.step_states.* -) --- Update run status -UPDATE pgflow.runs -SET status = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN 'failed' - ELSE status - END, - failed_at = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN now() - ELSE NULL - END -WHERE pgflow.runs.run_id = fail_task.run_id -RETURNING (status = 'failed') INTO v_run_failed; - --- Check if step failed by querying the step_states table -SELECT (status = 'failed') INTO v_step_failed -FROM pgflow.step_states -WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug; - --- Send broadcast event for step failure if the step was failed -IF v_step_failed THEN - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'step:failed', - 'run_id', fail_task.run_id, - 'step_slug', fail_task.step_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - concat('step:', fail_task.step_slug, ':failed'), - concat('pgflow:run:', fail_task.run_id), - false - ); -END IF; - --- Send broadcast event for run failure if the run was failed -IF v_run_failed THEN - DECLARE - v_flow_slug text; - BEGIN - SELECT flow_slug INTO v_flow_slug FROM pgflow.runs WHERE pgflow.runs.run_id = fail_task.run_id; - - PERFORM realtime.send( - jsonb_build_object( - 
'event_type', 'run:failed', - 'run_id', fail_task.run_id, - 'flow_slug', v_flow_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - 'run:failed', - concat('pgflow:run:', fail_task.run_id), - false - ); - END; -END IF; - --- For queued tasks: delay the message for retry with exponential backoff -PERFORM ( - WITH retry_config AS ( - SELECT - COALESCE(s.opt_base_delay, f.opt_base_delay) AS base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN pgflow.runs r ON r.flow_slug = f.flow_slug - WHERE r.run_id = fail_task.run_id - AND s.step_slug = fail_task.step_slug - ), - queued_tasks AS ( - SELECT - r.flow_slug, - st.message_id, - pgflow.calculate_retry_delay((SELECT base_delay FROM retry_config), st.attempts_count) AS calculated_delay - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'queued' - ) - SELECT pgmq.set_vt(qt.flow_slug, qt.message_id, qt.calculated_delay) - FROM queued_tasks qt - WHERE EXISTS (SELECT 1 FROM queued_tasks) -); - --- For failed tasks: archive the message -PERFORM ( - WITH failed_tasks AS ( - SELECT r.flow_slug, st.message_id - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'failed' - ) - SELECT pgmq.archive(ft.flow_slug, ft.message_id) - FROM failed_tasks ft - WHERE EXISTS (SELECT 1 FROM failed_tasks) -); - -return query select * -from pgflow.step_tasks st -where st.run_id = fail_task.run_id - and st.step_slug = fail_task.step_slug - and st.task_index = fail_task.task_index; - -end; -$$; diff --git a/examples/playground/supabase/migrations/20250627215517_20250627090700_pgflow_fix_function_search_paths.sql b/examples/playground/supabase/migrations/20250627215517_20250627090700_pgflow_fix_function_search_paths.sql deleted file mode 100644 index 09d325c00..000000000 --- a/examples/playground/supabase/migrations/20250627215517_20250627090700_pgflow_fix_function_search_paths.sql +++ /dev/null @@ -1,6 +0,0 @@ --- Add "calculate_retry_delay" function configuration parameter -ALTER FUNCTION "pgflow"."calculate_retry_delay" SET "search_path" = ''; --- Add "is_valid_slug" function configuration parameter -ALTER FUNCTION "pgflow"."is_valid_slug" SET "search_path" = ''; --- Add "read_with_poll" function configuration parameter -ALTER FUNCTION "pgflow"."read_with_poll" SET "search_path" = ''; diff --git a/examples/playground/supabase/migrations/20251008044437_20250707210212_pgflow_add_opt_start_delay.sql b/examples/playground/supabase/migrations/20251008044437_20250707210212_pgflow_add_opt_start_delay.sql deleted file mode 100644 index 28e05deb8..000000000 --- a/examples/playground/supabase/migrations/20251008044437_20250707210212_pgflow_add_opt_start_delay.sql +++ /dev/null @@ -1,103 +0,0 @@ --- Modify "steps" table -ALTER TABLE "pgflow"."steps" ADD CONSTRAINT "opt_start_delay_is_nonnegative" CHECK ((opt_start_delay IS NULL) OR (opt_start_delay >= 0)), ADD COLUMN "opt_start_delay" integer NULL; --- Modify "start_ready_steps" function -CREATE OR REPLACE FUNCTION "pgflow"."start_ready_steps" ("run_id" uuid) RETURNS void LANGUAGE sql SET "search_path" = '' AS $$ -WITH ready_steps AS ( - SELECT * - FROM pgflow.step_states AS step_state - WHERE step_state.run_id = 
start_ready_steps.run_id - AND step_state.status = 'created' - AND step_state.remaining_deps = 0 - ORDER BY step_state.step_slug - FOR UPDATE -), -started_step_states AS ( - UPDATE pgflow.step_states - SET status = 'started', - started_at = now() - FROM ready_steps - WHERE pgflow.step_states.run_id = start_ready_steps.run_id - AND pgflow.step_states.step_slug = ready_steps.step_slug - RETURNING pgflow.step_states.* -), -sent_messages AS ( - SELECT - started_step.flow_slug, - started_step.run_id, - started_step.step_slug, - pgmq.send( - started_step.flow_slug, - jsonb_build_object( - 'flow_slug', started_step.flow_slug, - 'run_id', started_step.run_id, - 'step_slug', started_step.step_slug, - 'task_index', 0 - ), - COALESCE(step.opt_start_delay, 0) - ) AS msg_id - FROM started_step_states AS started_step - JOIN pgflow.steps AS step - ON step.flow_slug = started_step.flow_slug - AND step.step_slug = started_step.step_slug -), -broadcast_events AS ( - SELECT - realtime.send( - jsonb_build_object( - 'event_type', 'step:started', - 'run_id', started_step.run_id, - 'step_slug', started_step.step_slug, - 'status', 'started', - 'started_at', started_step.started_at, - 'remaining_tasks', 1, - 'remaining_deps', started_step.remaining_deps - ), - concat('step:', started_step.step_slug, ':started'), - concat('pgflow:run:', started_step.run_id), - false - ) - FROM started_step_states AS started_step -) -INSERT INTO pgflow.step_tasks (flow_slug, run_id, step_slug, message_id) -SELECT - sent_messages.flow_slug, - sent_messages.run_id, - sent_messages.step_slug, - sent_messages.msg_id -FROM sent_messages; -$$; --- Create "add_step" function -CREATE FUNCTION "pgflow"."add_step" ("flow_slug" text, "step_slug" text, "deps_slugs" text[], "max_attempts" integer DEFAULT NULL::integer, "base_delay" integer DEFAULT NULL::integer, "timeout" integer DEFAULT NULL::integer, "start_delay" integer DEFAULT NULL::integer) RETURNS "pgflow"."steps" LANGUAGE sql SET "search_path" = '' AS $$ -WITH - next_index AS ( - SELECT COALESCE(MAX(step_index) + 1, 0) as idx - FROM pgflow.steps - WHERE flow_slug = add_step.flow_slug - ), - create_step AS ( - INSERT INTO pgflow.steps (flow_slug, step_slug, step_index, deps_count, opt_max_attempts, opt_base_delay, opt_timeout, opt_start_delay) - SELECT add_step.flow_slug, add_step.step_slug, idx, COALESCE(array_length(deps_slugs, 1), 0), max_attempts, base_delay, timeout, start_delay - FROM next_index - ON CONFLICT (flow_slug, step_slug) - DO UPDATE SET step_slug = pgflow.steps.step_slug - RETURNING * - ), - insert_deps AS ( - INSERT INTO pgflow.deps (flow_slug, dep_slug, step_slug) - SELECT add_step.flow_slug, d.dep_slug, add_step.step_slug - FROM unnest(deps_slugs) AS d(dep_slug) - ON CONFLICT (flow_slug, dep_slug, step_slug) DO NOTHING - RETURNING 1 - ) --- Return the created step -SELECT * FROM create_step; -$$; --- Drop "add_step" function -DROP FUNCTION "pgflow"."add_step" (text, text, integer, integer, integer); --- Drop "add_step" function -DROP FUNCTION "pgflow"."add_step" (text, text, text[], integer, integer, integer); --- Create "add_step" function -CREATE FUNCTION "pgflow"."add_step" ("flow_slug" text, "step_slug" text, "max_attempts" integer DEFAULT NULL::integer, "base_delay" integer DEFAULT NULL::integer, "timeout" integer DEFAULT NULL::integer, "start_delay" integer DEFAULT NULL::integer) RETURNS "pgflow"."steps" LANGUAGE sql SET "search_path" = '' AS $$ --- Call the original function with an empty array - SELECT * FROM pgflow.add_step(flow_slug, step_slug, 
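-- Illustrative usage sketch, not part of the deleted migration: registering a step
-- with the opt_start_delay option added above. Slugs are hypothetical; start_delay is
-- in seconds and is passed through to pgmq.send as the message delay.
SELECT pgflow.add_step(
  'my_flow',               -- flow_slug
  'send_report',           -- step_slug
  ARRAY['generate_pdf'],   -- deps_slugs: runs after generate_pdf completes
  3,                       -- max_attempts
  5,                       -- base_delay (seconds, used for retry backoff)
  60,                      -- timeout (seconds)
  300                      -- start_delay: task becomes visible after 5 minutes
);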
ARRAY[]::text[], max_attempts, base_delay, timeout, start_delay); -$$; diff --git a/examples/playground/supabase/migrations/20251008044438_20250719205006_pgflow_worker_deprecation.sql b/examples/playground/supabase/migrations/20251008044438_20250719205006_pgflow_worker_deprecation.sql deleted file mode 100644 index 783852022..000000000 --- a/examples/playground/supabase/migrations/20251008044438_20250719205006_pgflow_worker_deprecation.sql +++ /dev/null @@ -1,2 +0,0 @@ --- Rename a column from "stopped_at" to "deprecated_at" -ALTER TABLE "pgflow"."workers" RENAME COLUMN "stopped_at" TO "deprecated_at"; diff --git a/examples/playground/supabase/migrations/20251008044439_20251006073122_pgflow_add_map_step_type.sql b/examples/playground/supabase/migrations/20251008044439_20251006073122_pgflow_add_map_step_type.sql deleted file mode 100644 index 3046fd848..000000000 --- a/examples/playground/supabase/migrations/20251008044439_20251006073122_pgflow_add_map_step_type.sql +++ /dev/null @@ -1,1246 +0,0 @@ --- Modify "step_task_record" composite type -ALTER TYPE "pgflow"."step_task_record" ADD ATTRIBUTE "task_index" integer; --- Modify "step_states" table - Step 1: Drop old constraint and NOT NULL -ALTER TABLE "pgflow"."step_states" - DROP CONSTRAINT "step_states_remaining_tasks_check", - ALTER COLUMN "remaining_tasks" DROP NOT NULL, - ALTER COLUMN "remaining_tasks" DROP DEFAULT, - ADD COLUMN "initial_tasks" integer NULL; --- MANUAL DATA MIGRATION: Prepare existing data for new constraints --- This must run AFTER dropping NOT NULL but BEFORE adding new constraints - --- Update 1: Set remaining_tasks to NULL for 'created' status --- The new constraint "remaining_tasks_state_consistency" requires that --- remaining_tasks is NULL when status = 'created' -UPDATE "pgflow"."step_states" -SET "remaining_tasks" = NULL -WHERE "status" = 'created'; - --- Update 2: Backfill initial_tasks for 'started' steps --- The new constraint "initial_tasks_known_when_started" requires that --- initial_tasks is NOT NULL when status = 'started' --- For existing started steps, initial_tasks should equal current remaining_tasks -UPDATE "pgflow"."step_states" -SET "initial_tasks" = COALESCE("remaining_tasks", 1) -WHERE "status" = 'started'; --- Modify "step_states" table - Step 2: Add new constraints -ALTER TABLE "pgflow"."step_states" - ADD CONSTRAINT "initial_tasks_known_when_started" CHECK ((status <> 'started'::text) OR (initial_tasks IS NOT NULL)), - ADD CONSTRAINT "remaining_tasks_state_consistency" CHECK ((remaining_tasks IS NULL) OR (status <> 'created'::text)), - ADD CONSTRAINT "step_states_initial_tasks_check" CHECK ((initial_tasks IS NULL) OR (initial_tasks >= 0)); --- Modify "step_tasks" table -ALTER TABLE "pgflow"."step_tasks" DROP CONSTRAINT "only_single_task_per_step", DROP CONSTRAINT "output_valid_only_for_completed", ADD CONSTRAINT "output_valid_only_for_completed" CHECK ((output IS NULL) OR (status = ANY (ARRAY['completed'::text, 'failed'::text]))); --- Modify "steps" table -ALTER TABLE "pgflow"."steps" DROP CONSTRAINT "steps_step_type_check", ADD CONSTRAINT "steps_step_type_check" CHECK (step_type = ANY (ARRAY['single'::text, 'map'::text])); --- Modify "maybe_complete_run" function -CREATE OR REPLACE FUNCTION "pgflow"."maybe_complete_run" ("run_id" uuid) RETURNS void LANGUAGE plpgsql SET "search_path" = '' AS $$ -declare - v_completed_run pgflow.runs%ROWTYPE; -begin - -- ========================================== - -- CHECK AND COMPLETE RUN IF FINISHED - -- ========================================== - -- 
---------- Complete run if all steps done ---------- - UPDATE pgflow.runs - SET - status = 'completed', - completed_at = now(), - -- Only compute expensive aggregation when actually completing the run - output = ( - -- ---------- Gather outputs from leaf steps ---------- - -- Leaf steps = steps with no dependents - -- For map steps: aggregate all task outputs into array - -- For single steps: use the single task output - SELECT jsonb_object_agg( - step_slug, - CASE - WHEN step_type = 'map' THEN aggregated_output - ELSE single_output - END - ) - FROM ( - SELECT DISTINCT - leaf_state.step_slug, - leaf_step.step_type, - -- For map steps: aggregate all task outputs - CASE WHEN leaf_step.step_type = 'map' THEN - (SELECT COALESCE(jsonb_agg(leaf_task.output ORDER BY leaf_task.task_index), '[]'::jsonb) - FROM pgflow.step_tasks leaf_task - WHERE leaf_task.run_id = leaf_state.run_id - AND leaf_task.step_slug = leaf_state.step_slug - AND leaf_task.status = 'completed') - END as aggregated_output, - -- For single steps: get the single output - CASE WHEN leaf_step.step_type = 'single' THEN - (SELECT leaf_task.output - FROM pgflow.step_tasks leaf_task - WHERE leaf_task.run_id = leaf_state.run_id - AND leaf_task.step_slug = leaf_state.step_slug - AND leaf_task.status = 'completed' - LIMIT 1) - END as single_output - FROM pgflow.step_states leaf_state - JOIN pgflow.steps leaf_step ON leaf_step.flow_slug = leaf_state.flow_slug AND leaf_step.step_slug = leaf_state.step_slug - WHERE leaf_state.run_id = maybe_complete_run.run_id - AND leaf_state.status = 'completed' - AND NOT EXISTS ( - SELECT 1 - FROM pgflow.deps dep - WHERE dep.flow_slug = leaf_state.flow_slug - AND dep.dep_slug = leaf_state.step_slug - ) - ) leaf_outputs - ) - WHERE pgflow.runs.run_id = maybe_complete_run.run_id - AND pgflow.runs.remaining_steps = 0 - AND pgflow.runs.status != 'completed' - RETURNING * INTO v_completed_run; - - -- ========================================== - -- BROADCAST COMPLETION EVENT - -- ========================================== - IF v_completed_run.run_id IS NOT NULL THEN - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'run:completed', - 'run_id', v_completed_run.run_id, - 'flow_slug', v_completed_run.flow_slug, - 'status', 'completed', - 'output', v_completed_run.output, - 'completed_at', v_completed_run.completed_at - ), - 'run:completed', - concat('pgflow:run:', v_completed_run.run_id), - false - ); - END IF; -end; -$$; --- Modify "start_ready_steps" function -CREATE OR REPLACE FUNCTION "pgflow"."start_ready_steps" ("run_id" uuid) RETURNS void LANGUAGE plpgsql SET "search_path" = '' AS $$ -begin --- ========================================== --- GUARD: No mutations on failed runs --- ========================================== -IF EXISTS (SELECT 1 FROM pgflow.runs WHERE pgflow.runs.run_id = start_ready_steps.run_id AND pgflow.runs.status = 'failed') THEN - RETURN; -END IF; - --- ========================================== --- HANDLE EMPTY ARRAY MAPS (initial_tasks = 0) --- ========================================== --- These complete immediately without spawning tasks -WITH empty_map_steps AS ( - SELECT step_state.* - FROM pgflow.step_states AS step_state - JOIN pgflow.steps AS step - ON step.flow_slug = step_state.flow_slug - AND step.step_slug = step_state.step_slug - WHERE step_state.run_id = start_ready_steps.run_id - AND step_state.status = 'created' - AND step_state.remaining_deps = 0 - AND step.step_type = 'map' - AND step_state.initial_tasks = 0 - ORDER BY step_state.step_slug - FOR UPDATE OF 
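-- Illustrative sketch, not part of the deleted migration: the leaf-output shape the
-- aggregation above produces. With hypothetical leaf steps, a single step "summarize"
-- whose task output is "done" and a map step "scrape" whose tasks produced "a" and "b":
SELECT jsonb_object_agg(step_slug, output)
FROM (VALUES
  ('summarize', '"done"'::jsonb),     -- single step: its one task output, as-is
  ('scrape',    '["a","b"]'::jsonb)   -- map step: task outputs aggregated by task_index
) AS leaf(step_slug, output);
-- => {"scrape": ["a", "b"], "summarize": "done"}   (stored as runs.output)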
step_state -), --- ---------- Complete empty map steps ---------- -completed_empty_steps AS ( - UPDATE pgflow.step_states - SET status = 'completed', - started_at = now(), - completed_at = now(), - remaining_tasks = 0 - FROM empty_map_steps - WHERE pgflow.step_states.run_id = start_ready_steps.run_id - AND pgflow.step_states.step_slug = empty_map_steps.step_slug - RETURNING pgflow.step_states.* -), --- ---------- Broadcast completion events ---------- -broadcast_empty_completed AS ( - SELECT - realtime.send( - jsonb_build_object( - 'event_type', 'step:completed', - 'run_id', completed_step.run_id, - 'step_slug', completed_step.step_slug, - 'status', 'completed', - 'started_at', completed_step.started_at, - 'completed_at', completed_step.completed_at, - 'remaining_tasks', 0, - 'remaining_deps', 0, - 'output', '[]'::jsonb - ), - concat('step:', completed_step.step_slug, ':completed'), - concat('pgflow:run:', completed_step.run_id), - false - ) - FROM completed_empty_steps AS completed_step -), - --- ========================================== --- HANDLE NORMAL STEPS (initial_tasks > 0) --- ========================================== --- ---------- Find ready steps ---------- --- Steps with no remaining deps and known task count -ready_steps AS ( - SELECT * - FROM pgflow.step_states AS step_state - WHERE step_state.run_id = start_ready_steps.run_id - AND step_state.status = 'created' - AND step_state.remaining_deps = 0 - AND step_state.initial_tasks IS NOT NULL -- NEW: Cannot start with unknown count - AND step_state.initial_tasks > 0 -- Don't start taskless steps - -- Exclude empty map steps already handled - AND NOT EXISTS ( - SELECT 1 FROM empty_map_steps - WHERE empty_map_steps.run_id = step_state.run_id - AND empty_map_steps.step_slug = step_state.step_slug - ) - ORDER BY step_state.step_slug - FOR UPDATE -), --- ---------- Mark steps as started ---------- -started_step_states AS ( - UPDATE pgflow.step_states - SET status = 'started', - started_at = now(), - remaining_tasks = ready_steps.initial_tasks -- Copy initial_tasks to remaining_tasks when starting - FROM ready_steps - WHERE pgflow.step_states.run_id = start_ready_steps.run_id - AND pgflow.step_states.step_slug = ready_steps.step_slug - RETURNING pgflow.step_states.* -), - --- ========================================== --- TASK GENERATION AND QUEUE MESSAGES --- ========================================== --- ---------- Generate tasks and batch messages ---------- --- Single steps: 1 task (index 0) --- Map steps: N tasks (indices 0..N-1) -message_batches AS ( - SELECT - started_step.flow_slug, - started_step.run_id, - started_step.step_slug, - COALESCE(step.opt_start_delay, 0) as delay, - array_agg( - jsonb_build_object( - 'flow_slug', started_step.flow_slug, - 'run_id', started_step.run_id, - 'step_slug', started_step.step_slug, - 'task_index', task_idx.task_index - ) ORDER BY task_idx.task_index - ) AS messages, - array_agg(task_idx.task_index ORDER BY task_idx.task_index) AS task_indices - FROM started_step_states AS started_step - JOIN pgflow.steps AS step - ON step.flow_slug = started_step.flow_slug - AND step.step_slug = started_step.step_slug - -- Generate task indices from 0 to initial_tasks-1 - CROSS JOIN LATERAL generate_series(0, started_step.initial_tasks - 1) AS task_idx(task_index) - GROUP BY started_step.flow_slug, started_step.run_id, started_step.step_slug, step.opt_start_delay -), --- ---------- Send messages to queue ---------- --- Uses batch sending for performance with large arrays -sent_messages AS ( - SELECT - 
mb.flow_slug, - mb.run_id, - mb.step_slug, - task_indices.task_index, - msg_ids.msg_id - FROM message_batches mb - CROSS JOIN LATERAL unnest(mb.task_indices) WITH ORDINALITY AS task_indices(task_index, idx_ord) - CROSS JOIN LATERAL pgmq.send_batch(mb.flow_slug, mb.messages, mb.delay) WITH ORDINALITY AS msg_ids(msg_id, msg_ord) - WHERE task_indices.idx_ord = msg_ids.msg_ord -), - --- ---------- Broadcast step:started events ---------- -broadcast_events AS ( - SELECT - realtime.send( - jsonb_build_object( - 'event_type', 'step:started', - 'run_id', started_step.run_id, - 'step_slug', started_step.step_slug, - 'status', 'started', - 'started_at', started_step.started_at, - 'remaining_tasks', started_step.remaining_tasks, - 'remaining_deps', started_step.remaining_deps - ), - concat('step:', started_step.step_slug, ':started'), - concat('pgflow:run:', started_step.run_id), - false - ) - FROM started_step_states AS started_step -) - --- ========================================== --- RECORD TASKS IN DATABASE --- ========================================== -INSERT INTO pgflow.step_tasks (flow_slug, run_id, step_slug, task_index, message_id) -SELECT - sent_messages.flow_slug, - sent_messages.run_id, - sent_messages.step_slug, - sent_messages.task_index, - sent_messages.msg_id -FROM sent_messages; - -end; -$$; --- Create "cascade_complete_taskless_steps" function -CREATE FUNCTION "pgflow"."cascade_complete_taskless_steps" ("run_id" uuid) RETURNS integer LANGUAGE plpgsql AS $$ -DECLARE - v_total_completed int := 0; - v_iteration_completed int; - v_iterations int := 0; - v_max_iterations int := 50; -BEGIN - -- ========================================== - -- ITERATIVE CASCADE COMPLETION - -- ========================================== - -- Completes taskless steps in waves until none remain - LOOP - -- ---------- Safety check ---------- - v_iterations := v_iterations + 1; - IF v_iterations > v_max_iterations THEN - RAISE EXCEPTION 'Cascade loop exceeded safety limit of % iterations', v_max_iterations; - END IF; - - -- ========================================== - -- COMPLETE READY TASKLESS STEPS - -- ========================================== - WITH completed AS ( - -- ---------- Complete taskless steps ---------- - -- Steps with initial_tasks=0 and no remaining deps - UPDATE pgflow.step_states ss - SET status = 'completed', - started_at = now(), - completed_at = now(), - remaining_tasks = 0 - FROM pgflow.steps s - WHERE ss.run_id = cascade_complete_taskless_steps.run_id - AND ss.flow_slug = s.flow_slug - AND ss.step_slug = s.step_slug - AND ss.status = 'created' - AND ss.remaining_deps = 0 - AND ss.initial_tasks = 0 - -- Process in topological order to ensure proper cascade - RETURNING ss.* - ), - -- ---------- Update dependent steps ---------- - -- Propagate completion and empty arrays to dependents - dep_updates AS ( - UPDATE pgflow.step_states ss - SET remaining_deps = ss.remaining_deps - dep_count.count, - -- If the dependent is a map step and its dependency completed with 0 tasks, - -- set its initial_tasks to 0 as well - initial_tasks = CASE - WHEN s.step_type = 'map' AND dep_count.has_zero_tasks - THEN 0 -- Empty array propagation - ELSE ss.initial_tasks -- Keep existing value (including NULL) - END - FROM ( - -- Aggregate dependency updates per dependent step - SELECT - d.flow_slug, - d.step_slug as dependent_slug, - COUNT(*) as count, - BOOL_OR(c.initial_tasks = 0) as has_zero_tasks - FROM completed c - JOIN pgflow.deps d ON d.flow_slug = c.flow_slug - AND d.dep_slug = c.step_slug - GROUP BY 
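-- Illustrative sketch, not part of the deleted migration: the WITH ORDINALITY pairing
-- used in sent_messages above. pgmq.send_batch returns msg_ids in input order, so
-- zipping both sets by ordinality maps each task_index to the message that carries it.
-- Standalone equivalent with plain, hypothetical arrays:
SELECT t.task_index, m.msg_id
FROM unnest(ARRAY[0, 1, 2]) WITH ORDINALITY AS t(task_index, ord)
JOIN unnest(ARRAY[101, 102, 103]::bigint[]) WITH ORDINALITY AS m(msg_id, ord) USING (ord);
-- => (0, 101), (1, 102), (2, 103)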
d.flow_slug, d.step_slug - ) dep_count, - pgflow.steps s - WHERE ss.run_id = cascade_complete_taskless_steps.run_id - AND ss.flow_slug = dep_count.flow_slug - AND ss.step_slug = dep_count.dependent_slug - AND s.flow_slug = ss.flow_slug - AND s.step_slug = ss.step_slug - ), - -- ---------- Update run counters ---------- - -- Only decrement remaining_steps; let maybe_complete_run handle finalization - run_updates AS ( - UPDATE pgflow.runs r - SET remaining_steps = r.remaining_steps - c.completed_count - FROM (SELECT COUNT(*) AS completed_count FROM completed) c - WHERE r.run_id = cascade_complete_taskless_steps.run_id - AND c.completed_count > 0 - ) - -- ---------- Check iteration results ---------- - SELECT COUNT(*) INTO v_iteration_completed FROM completed; - - EXIT WHEN v_iteration_completed = 0; -- No more steps to complete - v_total_completed := v_total_completed + v_iteration_completed; - END LOOP; - - RETURN v_total_completed; -END; -$$; --- Modify "complete_task" function -CREATE OR REPLACE FUNCTION "pgflow"."complete_task" ("run_id" uuid, "step_slug" text, "task_index" integer, "output" jsonb) RETURNS SETOF "pgflow"."step_tasks" LANGUAGE plpgsql SET "search_path" = '' AS $$ -declare - v_step_state pgflow.step_states%ROWTYPE; - v_dependent_map_slug text; - v_run_record pgflow.runs%ROWTYPE; - v_step_record pgflow.step_states%ROWTYPE; -begin - --- ========================================== --- GUARD: No mutations on failed runs --- ========================================== -IF EXISTS (SELECT 1 FROM pgflow.runs WHERE pgflow.runs.run_id = complete_task.run_id AND pgflow.runs.status = 'failed') THEN - RETURN QUERY SELECT * FROM pgflow.step_tasks - WHERE pgflow.step_tasks.run_id = complete_task.run_id - AND pgflow.step_tasks.step_slug = complete_task.step_slug - AND pgflow.step_tasks.task_index = complete_task.task_index; - RETURN; -END IF; - --- ========================================== --- LOCK ACQUISITION AND TYPE VALIDATION --- ========================================== --- Acquire locks first to prevent race conditions -SELECT * INTO v_run_record FROM pgflow.runs -WHERE pgflow.runs.run_id = complete_task.run_id -FOR UPDATE; - -SELECT * INTO v_step_record FROM pgflow.step_states -WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = complete_task.step_slug -FOR UPDATE; - --- Check for type violations AFTER acquiring locks -SELECT child_step.step_slug INTO v_dependent_map_slug -FROM pgflow.deps dependency -JOIN pgflow.steps child_step ON child_step.flow_slug = dependency.flow_slug - AND child_step.step_slug = dependency.step_slug -JOIN pgflow.steps parent_step ON parent_step.flow_slug = dependency.flow_slug - AND parent_step.step_slug = dependency.dep_slug -JOIN pgflow.step_states child_state ON child_state.flow_slug = child_step.flow_slug - AND child_state.step_slug = child_step.step_slug -WHERE dependency.dep_slug = complete_task.step_slug -- parent is the completing step - AND dependency.flow_slug = v_run_record.flow_slug - AND parent_step.step_type = 'single' -- Only validate single steps - AND child_step.step_type = 'map' - AND child_state.run_id = complete_task.run_id - AND child_state.initial_tasks IS NULL - AND (complete_task.output IS NULL OR jsonb_typeof(complete_task.output) != 'array') -LIMIT 1; - --- Handle type violation if detected -IF v_dependent_map_slug IS NOT NULL THEN - -- Mark run as failed immediately - UPDATE pgflow.runs - SET status = 'failed', - failed_at = now() - WHERE pgflow.runs.run_id = complete_task.run_id; - - -- 
Archive all active messages (both queued and started) to prevent orphaned messages - PERFORM pgmq.archive( - v_run_record.flow_slug, - array_agg(st.message_id) - ) - FROM pgflow.step_tasks st - WHERE st.run_id = complete_task.run_id - AND st.status IN ('queued', 'started') - AND st.message_id IS NOT NULL - HAVING count(*) > 0; -- Only call archive if there are messages to archive - - -- Mark current task as failed and store the output - UPDATE pgflow.step_tasks - SET status = 'failed', - failed_at = now(), - output = complete_task.output, -- Store the output that caused the violation - error_message = '[TYPE_VIOLATION] Produced ' || - CASE WHEN complete_task.output IS NULL THEN 'null' - ELSE jsonb_typeof(complete_task.output) END || - ' instead of array' - WHERE pgflow.step_tasks.run_id = complete_task.run_id - AND pgflow.step_tasks.step_slug = complete_task.step_slug - AND pgflow.step_tasks.task_index = complete_task.task_index; - - -- Mark step state as failed - UPDATE pgflow.step_states - SET status = 'failed', - failed_at = now(), - error_message = '[TYPE_VIOLATION] Map step ' || v_dependent_map_slug || - ' expects array input but dependency ' || complete_task.step_slug || - ' produced ' || CASE WHEN complete_task.output IS NULL THEN 'null' - ELSE jsonb_typeof(complete_task.output) END - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = complete_task.step_slug; - - -- Archive the current task's message (it was started, now failed) - PERFORM pgmq.archive( - v_run_record.flow_slug, - st.message_id -- Single message, use scalar form - ) - FROM pgflow.step_tasks st - WHERE st.run_id = complete_task.run_id - AND st.step_slug = complete_task.step_slug - AND st.task_index = complete_task.task_index - AND st.message_id IS NOT NULL; - - -- Return empty result - RETURN QUERY SELECT * FROM pgflow.step_tasks WHERE false; - RETURN; -END IF; - --- ========================================== --- MAIN CTE CHAIN: Update task and propagate changes --- ========================================== -WITH --- ---------- Task completion ---------- --- Update the task record with completion status and output -task AS ( - UPDATE pgflow.step_tasks - SET - status = 'completed', - completed_at = now(), - output = complete_task.output - WHERE pgflow.step_tasks.run_id = complete_task.run_id - AND pgflow.step_tasks.step_slug = complete_task.step_slug - AND pgflow.step_tasks.task_index = complete_task.task_index - AND pgflow.step_tasks.status = 'started' - RETURNING * -), --- ---------- Step state update ---------- --- Decrement remaining_tasks and potentially mark step as completed -step_state AS ( - UPDATE pgflow.step_states - SET - status = CASE - WHEN pgflow.step_states.remaining_tasks = 1 THEN 'completed' -- Will be 0 after decrement - ELSE 'started' - END, - completed_at = CASE - WHEN pgflow.step_states.remaining_tasks = 1 THEN now() -- Will be 0 after decrement - ELSE NULL - END, - remaining_tasks = pgflow.step_states.remaining_tasks - 1 - FROM task - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug = complete_task.step_slug - RETURNING pgflow.step_states.* -), --- ---------- Dependency resolution ---------- --- Find all child steps that depend on the completed parent step (only if parent completed) -child_steps AS ( - SELECT deps.step_slug AS child_step_slug - FROM pgflow.deps deps - JOIN step_state parent_state ON parent_state.status = 'completed' AND deps.flow_slug = parent_state.flow_slug - WHERE deps.dep_slug = 
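-- Illustrative sketch, not part of the deleted migration: jsonb_typeof drives the
-- [TYPE_VIOLATION] message above when a single step feeding a map produces a non-array.
SELECT jsonb_typeof('{"a": 1}'::jsonb);   -- => 'object'
-- error_message then reads: [TYPE_VIOLATION] Produced object instead of array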
complete_task.step_slug -- dep_slug is the parent, step_slug is the child - ORDER BY deps.step_slug -- Ensure consistent ordering -), --- ---------- Lock child steps ---------- --- Acquire locks on all child steps before updating them -child_steps_lock AS ( - SELECT * FROM pgflow.step_states - WHERE pgflow.step_states.run_id = complete_task.run_id - AND pgflow.step_states.step_slug IN (SELECT child_step_slug FROM child_steps) - FOR UPDATE -), --- ---------- Update child steps ---------- --- Decrement remaining_deps and resolve NULL initial_tasks for map steps -child_steps_update AS ( - UPDATE pgflow.step_states child_state - SET remaining_deps = child_state.remaining_deps - 1, - -- Resolve NULL initial_tasks for child map steps - -- This is where child maps learn their array size from the parent - -- This CTE only runs when the parent step is complete (see child_steps JOIN) - initial_tasks = CASE - WHEN child_step.step_type = 'map' AND child_state.initial_tasks IS NULL THEN - CASE - WHEN parent_step.step_type = 'map' THEN - -- Map->map: Count all completed tasks from parent map - -- We add 1 because the current task is being completed in this transaction - -- but isn't yet visible as 'completed' in the step_tasks table - -- TODO: Refactor to use future column step_states.total_tasks - -- Would eliminate the COUNT query and just use parent_state.total_tasks - (SELECT COUNT(*)::int + 1 - FROM pgflow.step_tasks parent_tasks - WHERE parent_tasks.run_id = complete_task.run_id - AND parent_tasks.step_slug = complete_task.step_slug - AND parent_tasks.status = 'completed' - AND parent_tasks.task_index != complete_task.task_index) - ELSE - -- Single->map: Use output array length (single steps complete immediately) - CASE - WHEN complete_task.output IS NOT NULL - AND jsonb_typeof(complete_task.output) = 'array' THEN - jsonb_array_length(complete_task.output) - ELSE NULL -- Keep NULL if not an array - END - END - ELSE child_state.initial_tasks -- Keep existing value (including NULL) - END - FROM child_steps children - JOIN pgflow.steps child_step ON child_step.flow_slug = (SELECT r.flow_slug FROM pgflow.runs r WHERE r.run_id = complete_task.run_id) - AND child_step.step_slug = children.child_step_slug - JOIN pgflow.steps parent_step ON parent_step.flow_slug = (SELECT r.flow_slug FROM pgflow.runs r WHERE r.run_id = complete_task.run_id) - AND parent_step.step_slug = complete_task.step_slug - WHERE child_state.run_id = complete_task.run_id - AND child_state.step_slug = children.child_step_slug -) --- ---------- Update run remaining_steps ---------- --- Decrement the run's remaining_steps counter if step completed -UPDATE pgflow.runs -SET remaining_steps = pgflow.runs.remaining_steps - 1 -FROM step_state -WHERE pgflow.runs.run_id = complete_task.run_id - AND step_state.status = 'completed'; - --- ========================================== --- POST-COMPLETION ACTIONS --- ========================================== - --- ---------- Get updated state for broadcasting ---------- -SELECT * INTO v_step_state FROM pgflow.step_states -WHERE pgflow.step_states.run_id = complete_task.run_id AND pgflow.step_states.step_slug = complete_task.step_slug; - --- ---------- Handle step completion ---------- -IF v_step_state.status = 'completed' THEN - -- Cascade complete any taskless steps that are now ready - PERFORM pgflow.cascade_complete_taskless_steps(complete_task.run_id); - - -- Broadcast step:completed event - -- For map steps, aggregate all task outputs; for single steps, use the task output - PERFORM 
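-- Illustrative sketch, not part of the deleted migration: the single->map branch above
-- sizes a dependent map step from its parent's output array.
SELECT jsonb_array_length('[10, 20, 30]'::jsonb);   -- => 3
-- so the child map's initial_tasks resolves to 3 (one task per element); an empty
-- array ('[]', length 0) cascades to immediate completion with output [].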
realtime.send( - jsonb_build_object( - 'event_type', 'step:completed', - 'run_id', complete_task.run_id, - 'step_slug', complete_task.step_slug, - 'status', 'completed', - 'output', CASE - WHEN (SELECT s.step_type FROM pgflow.steps s - WHERE s.flow_slug = v_step_state.flow_slug - AND s.step_slug = complete_task.step_slug) = 'map' THEN - -- Aggregate all task outputs for map steps - (SELECT COALESCE(jsonb_agg(st.output ORDER BY st.task_index), '[]'::jsonb) - FROM pgflow.step_tasks st - WHERE st.run_id = complete_task.run_id - AND st.step_slug = complete_task.step_slug - AND st.status = 'completed') - ELSE - -- Single step: use the individual task output - complete_task.output - END, - 'completed_at', v_step_state.completed_at - ), - concat('step:', complete_task.step_slug, ':completed'), - concat('pgflow:run:', complete_task.run_id), - false - ); -END IF; - --- ---------- Archive completed task message ---------- --- Move message from active queue to archive table -PERFORM ( - WITH completed_tasks AS ( - SELECT r.flow_slug, st.message_id - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = complete_task.run_id - AND st.step_slug = complete_task.step_slug - AND st.task_index = complete_task.task_index - AND st.status = 'completed' - ) - SELECT pgmq.archive(ct.flow_slug, ct.message_id) - FROM completed_tasks ct - WHERE EXISTS (SELECT 1 FROM completed_tasks) -); - --- ---------- Trigger next steps ---------- --- Start any steps that are now ready (deps satisfied) -PERFORM pgflow.start_ready_steps(complete_task.run_id); - --- Check if the entire run is complete -PERFORM pgflow.maybe_complete_run(complete_task.run_id); - --- ---------- Return completed task ---------- -RETURN QUERY SELECT * -FROM pgflow.step_tasks AS step_task -WHERE step_task.run_id = complete_task.run_id - AND step_task.step_slug = complete_task.step_slug - AND step_task.task_index = complete_task.task_index; - -end; -$$; --- Modify "fail_task" function -CREATE OR REPLACE FUNCTION "pgflow"."fail_task" ("run_id" uuid, "step_slug" text, "task_index" integer, "error_message" text) RETURNS SETOF "pgflow"."step_tasks" LANGUAGE plpgsql SET "search_path" = '' AS $$ -DECLARE - v_run_failed boolean; - v_step_failed boolean; -begin - --- If run is already failed, no retries allowed -IF EXISTS (SELECT 1 FROM pgflow.runs WHERE pgflow.runs.run_id = fail_task.run_id AND pgflow.runs.status = 'failed') THEN - UPDATE pgflow.step_tasks - SET status = 'failed', - failed_at = now(), - error_message = fail_task.error_message - WHERE pgflow.step_tasks.run_id = fail_task.run_id - AND pgflow.step_tasks.step_slug = fail_task.step_slug - AND pgflow.step_tasks.task_index = fail_task.task_index - AND pgflow.step_tasks.status = 'started'; - - -- Archive the task's message - PERFORM pgmq.archive(r.flow_slug, ARRAY_AGG(st.message_id)) - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.message_id IS NOT NULL - GROUP BY r.flow_slug - HAVING COUNT(st.message_id) > 0; - - RETURN QUERY SELECT * FROM pgflow.step_tasks - WHERE pgflow.step_tasks.run_id = fail_task.run_id - AND pgflow.step_tasks.step_slug = fail_task.step_slug - AND pgflow.step_tasks.task_index = fail_task.task_index; - RETURN; -END IF; - -WITH run_lock AS ( - SELECT * FROM pgflow.runs - WHERE pgflow.runs.run_id = fail_task.run_id - FOR UPDATE -), -step_lock AS ( - SELECT * FROM pgflow.step_states - WHERE 
pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - FOR UPDATE -), -flow_info AS ( - SELECT r.flow_slug - FROM pgflow.runs r - WHERE r.run_id = fail_task.run_id -), -config AS ( - SELECT - COALESCE(s.opt_max_attempts, f.opt_max_attempts) AS opt_max_attempts, - COALESCE(s.opt_base_delay, f.opt_base_delay) AS opt_base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN flow_info fi ON fi.flow_slug = s.flow_slug - WHERE s.flow_slug = fi.flow_slug AND s.step_slug = fail_task.step_slug -), -fail_or_retry_task as ( - UPDATE pgflow.step_tasks as task - SET - status = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN 'queued' - ELSE 'failed' - END, - failed_at = CASE - WHEN task.attempts_count >= (SELECT opt_max_attempts FROM config) THEN now() - ELSE NULL - END, - started_at = CASE - WHEN task.attempts_count < (SELECT opt_max_attempts FROM config) THEN NULL - ELSE task.started_at - END, - error_message = fail_task.error_message - WHERE task.run_id = fail_task.run_id - AND task.step_slug = fail_task.step_slug - AND task.task_index = fail_task.task_index - AND task.status = 'started' - RETURNING * -), -maybe_fail_step AS ( - UPDATE pgflow.step_states - SET - status = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN 'failed' - ELSE pgflow.step_states.status - END, - failed_at = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN now() - ELSE NULL - END, - error_message = CASE - WHEN (select fail_or_retry_task.status from fail_or_retry_task) = 'failed' THEN fail_task.error_message - ELSE NULL - END - FROM fail_or_retry_task - WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug - RETURNING pgflow.step_states.* -) --- Update run status -UPDATE pgflow.runs -SET status = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN 'failed' - ELSE status - END, - failed_at = CASE - WHEN (select status from maybe_fail_step) = 'failed' THEN now() - ELSE NULL - END -WHERE pgflow.runs.run_id = fail_task.run_id -RETURNING (status = 'failed') INTO v_run_failed; - --- Check if step failed by querying the step_states table -SELECT (status = 'failed') INTO v_step_failed -FROM pgflow.step_states -WHERE pgflow.step_states.run_id = fail_task.run_id - AND pgflow.step_states.step_slug = fail_task.step_slug; - --- Send broadcast event for step failure if the step was failed -IF v_step_failed THEN - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'step:failed', - 'run_id', fail_task.run_id, - 'step_slug', fail_task.step_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - concat('step:', fail_task.step_slug, ':failed'), - concat('pgflow:run:', fail_task.run_id), - false - ); -END IF; - --- Send broadcast event for run failure if the run was failed -IF v_run_failed THEN - DECLARE - v_flow_slug text; - BEGIN - SELECT flow_slug INTO v_flow_slug FROM pgflow.runs WHERE pgflow.runs.run_id = fail_task.run_id; - - PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'run:failed', - 'run_id', fail_task.run_id, - 'flow_slug', v_flow_slug, - 'status', 'failed', - 'error_message', fail_task.error_message, - 'failed_at', now() - ), - 'run:failed', - concat('pgflow:run:', fail_task.run_id), - false - ); - END; -END IF; - --- Archive all active messages (both queued and started) when run fails -IF v_run_failed THEN - PERFORM 
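-- Illustrative decision sketch, not part of the deleted migration, for
-- fail_or_retry_task above with opt_max_attempts = 3 (attempts_count is incremented
-- by start_tasks before the handler runs):
SELECT attempts, CASE WHEN attempts < 3 THEN 'queued' ELSE 'failed' END AS next_status
FROM generate_series(1, 3) AS attempts;
-- => (1, 'queued'), (2, 'queued'), (3, 'failed')   -- failed_at set only on 'failed'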
pgmq.archive(r.flow_slug, ARRAY_AGG(st.message_id)) - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.status IN ('queued', 'started') - AND st.message_id IS NOT NULL - GROUP BY r.flow_slug - HAVING COUNT(st.message_id) > 0; -END IF; - --- For queued tasks: delay the message for retry with exponential backoff -PERFORM ( - WITH retry_config AS ( - SELECT - COALESCE(s.opt_base_delay, f.opt_base_delay) AS base_delay - FROM pgflow.steps s - JOIN pgflow.flows f ON f.flow_slug = s.flow_slug - JOIN pgflow.runs r ON r.flow_slug = f.flow_slug - WHERE r.run_id = fail_task.run_id - AND s.step_slug = fail_task.step_slug - ), - queued_tasks AS ( - SELECT - r.flow_slug, - st.message_id, - pgflow.calculate_retry_delay((SELECT base_delay FROM retry_config), st.attempts_count) AS calculated_delay - FROM pgflow.step_tasks st - JOIN pgflow.runs r ON st.run_id = r.run_id - WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'queued' - ) - SELECT pgmq.set_vt(qt.flow_slug, qt.message_id, qt.calculated_delay) - FROM queued_tasks qt - WHERE EXISTS (SELECT 1 FROM queued_tasks) -); - --- For failed tasks: archive the message -PERFORM pgmq.archive(r.flow_slug, ARRAY_AGG(st.message_id)) -FROM pgflow.step_tasks st -JOIN pgflow.runs r ON st.run_id = r.run_id -WHERE st.run_id = fail_task.run_id - AND st.step_slug = fail_task.step_slug - AND st.task_index = fail_task.task_index - AND st.status = 'failed' - AND st.message_id IS NOT NULL -GROUP BY r.flow_slug -HAVING COUNT(st.message_id) > 0; - -return query select * -from pgflow.step_tasks st -where st.run_id = fail_task.run_id - and st.step_slug = fail_task.step_slug - and st.task_index = fail_task.task_index; - -end; -$$; --- Modify "start_flow" function -CREATE OR REPLACE FUNCTION "pgflow"."start_flow" ("flow_slug" text, "input" jsonb, "run_id" uuid DEFAULT NULL::uuid) RETURNS SETOF "pgflow"."runs" LANGUAGE plpgsql SET "search_path" = '' AS $$ -declare - v_created_run pgflow.runs%ROWTYPE; - v_root_map_count int; -begin - --- ========================================== --- VALIDATION: Root map array input --- ========================================== -WITH root_maps AS ( - SELECT step_slug - FROM pgflow.steps - WHERE steps.flow_slug = start_flow.flow_slug - AND steps.step_type = 'map' - AND steps.deps_count = 0 -) -SELECT COUNT(*) INTO v_root_map_count FROM root_maps; - --- If we have root map steps, validate that input is an array -IF v_root_map_count > 0 THEN - -- First check for NULL (should be caught by NOT NULL constraint, but be defensive) - IF start_flow.input IS NULL THEN - RAISE EXCEPTION 'Flow % has root map steps but input is NULL', start_flow.flow_slug; - END IF; - - -- Then check if it's not an array - IF jsonb_typeof(start_flow.input) != 'array' THEN - RAISE EXCEPTION 'Flow % has root map steps but input is not an array (got %)', - start_flow.flow_slug, jsonb_typeof(start_flow.input); - END IF; -END IF; - --- ========================================== --- MAIN CTE CHAIN: Create run and step states --- ========================================== -WITH - -- ---------- Gather flow metadata ---------- - flow_steps AS ( - SELECT steps.flow_slug, steps.step_slug, steps.step_type, steps.deps_count - FROM pgflow.steps - WHERE steps.flow_slug = start_flow.flow_slug - ), - -- ---------- Create run record ---------- - created_run AS ( - INSERT INTO pgflow.runs (run_id, flow_slug, input, remaining_steps) - VALUES ( - 
COALESCE(start_flow.run_id, gen_random_uuid()), - start_flow.flow_slug, - start_flow.input, - (SELECT count(*) FROM flow_steps) - ) - RETURNING * - ), - -- ---------- Create step states ---------- - -- Sets initial_tasks: known for root maps, NULL for dependent maps - created_step_states AS ( - INSERT INTO pgflow.step_states (flow_slug, run_id, step_slug, remaining_deps, initial_tasks) - SELECT - fs.flow_slug, - (SELECT created_run.run_id FROM created_run), - fs.step_slug, - fs.deps_count, - -- Updated logic for initial_tasks: - CASE - WHEN fs.step_type = 'map' AND fs.deps_count = 0 THEN - -- Root map: get array length from input - CASE - WHEN jsonb_typeof(start_flow.input) = 'array' THEN - jsonb_array_length(start_flow.input) - ELSE - 1 - END - WHEN fs.step_type = 'map' AND fs.deps_count > 0 THEN - -- Dependent map: unknown until dependencies complete - NULL - ELSE - -- Single steps: always 1 task - 1 - END - FROM flow_steps fs - ) -SELECT * FROM created_run INTO v_created_run; - --- ========================================== --- POST-CREATION ACTIONS --- ========================================== - --- ---------- Broadcast run:started event ---------- -PERFORM realtime.send( - jsonb_build_object( - 'event_type', 'run:started', - 'run_id', v_created_run.run_id, - 'flow_slug', v_created_run.flow_slug, - 'input', v_created_run.input, - 'status', 'started', - 'remaining_steps', v_created_run.remaining_steps, - 'started_at', v_created_run.started_at - ), - 'run:started', - concat('pgflow:run:', v_created_run.run_id), - false -); - --- ---------- Complete taskless steps ---------- --- Handle empty array maps that should auto-complete -PERFORM pgflow.cascade_complete_taskless_steps(v_created_run.run_id); - --- ---------- Start initial steps ---------- --- Start root steps (those with no dependencies) -PERFORM pgflow.start_ready_steps(v_created_run.run_id); - --- ---------- Check for run completion ---------- --- If cascade completed all steps (zero-task flows), finalize the run -PERFORM pgflow.maybe_complete_run(v_created_run.run_id); - -RETURN QUERY SELECT * FROM pgflow.runs where pgflow.runs.run_id = v_created_run.run_id; - -end; -$$; --- Modify "start_tasks" function -CREATE OR REPLACE FUNCTION "pgflow"."start_tasks" ("flow_slug" text, "msg_ids" bigint[], "worker_id" uuid) RETURNS SETOF "pgflow"."step_task_record" LANGUAGE sql SET "search_path" = '' AS $$ -with tasks as ( - select - task.flow_slug, - task.run_id, - task.step_slug, - task.task_index, - task.message_id - from pgflow.step_tasks as task - join pgflow.runs r on r.run_id = task.run_id - where task.flow_slug = start_tasks.flow_slug - and task.message_id = any(msg_ids) - and task.status = 'queued' - -- MVP: Don't start tasks on failed runs - and r.status != 'failed' - ), - start_tasks_update as ( - update pgflow.step_tasks - set - attempts_count = attempts_count + 1, - status = 'started', - started_at = now(), - last_worker_id = worker_id - from tasks - where step_tasks.message_id = tasks.message_id - and step_tasks.flow_slug = tasks.flow_slug - and step_tasks.status = 'queued' - ), - runs as ( - select - r.run_id, - r.input - from pgflow.runs r - where r.run_id in (select run_id from tasks) - ), - deps as ( - select - st.run_id, - st.step_slug, - dep.dep_slug, - -- Aggregate map outputs or use single output - CASE - WHEN dep_step.step_type = 'map' THEN - -- Aggregate all task outputs ordered by task_index - -- Use COALESCE to return empty array if no tasks - (SELECT COALESCE(jsonb_agg(dt.output ORDER BY dt.task_index), '[]'::jsonb) 
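-- Illustrative usage sketch, not part of the deleted migration: starting a flow whose
-- root step is a map. The flow slug is hypothetical; the input must be a JSON array or
-- start_flow raises the exception defined above.
SELECT * FROM pgflow.start_flow('scrape_sites', '["a.com", "b.com", "c.com"]'::jsonb);
-- root map initial_tasks = jsonb_array_length(input) = 3; an empty array ('[]')
-- yields initial_tasks = 0 and the step is cascade-completed immediately.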
- FROM pgflow.step_tasks dt - WHERE dt.run_id = st.run_id - AND dt.step_slug = dep.dep_slug - AND dt.status = 'completed') - ELSE - -- Single step: use the single task output - dep_task.output - END as dep_output - from tasks st - join pgflow.deps dep on dep.flow_slug = st.flow_slug and dep.step_slug = st.step_slug - join pgflow.steps dep_step on dep_step.flow_slug = dep.flow_slug and dep_step.step_slug = dep.dep_slug - left join pgflow.step_tasks dep_task on - dep_task.run_id = st.run_id and - dep_task.step_slug = dep.dep_slug and - dep_task.status = 'completed' - and dep_step.step_type = 'single' -- Only join for single steps - ), - deps_outputs as ( - select - d.run_id, - d.step_slug, - jsonb_object_agg(d.dep_slug, d.dep_output) as deps_output, - count(*) as dep_count - from deps d - group by d.run_id, d.step_slug - ), - timeouts as ( - select - task.message_id, - task.flow_slug, - coalesce(step.opt_timeout, flow.opt_timeout) + 2 as vt_delay - from tasks task - join pgflow.flows flow on flow.flow_slug = task.flow_slug - join pgflow.steps step on step.flow_slug = task.flow_slug and step.step_slug = task.step_slug - ), - -- Batch update visibility timeouts for all messages - set_vt_batch as ( - select pgflow.set_vt_batch( - start_tasks.flow_slug, - array_agg(t.message_id order by t.message_id), - array_agg(t.vt_delay order by t.message_id) - ) - from timeouts t - ) - select - st.flow_slug, - st.run_id, - st.step_slug, - -- ========================================== - -- INPUT CONSTRUCTION LOGIC - -- ========================================== - -- This nested CASE statement determines how to construct the input - -- for each task based on the step type (map vs non-map). - -- - -- The fundamental difference: - -- - Map steps: Receive RAW array elements (e.g., just 42 or "hello") - -- - Non-map steps: Receive structured objects with named keys - -- (e.g., {"run": {...}, "dependency1": {...}}) - -- ========================================== - CASE - -- -------------------- MAP STEPS -------------------- - -- Map steps process arrays element-by-element. - -- Each task receives ONE element from the array at its task_index position. - WHEN step.step_type = 'map' THEN - -- Map steps get raw array elements without any wrapper object - CASE - -- ROOT MAP: Gets array from run input - -- Example: run input = [1, 2, 3] - -- task 0 gets: 1 - -- task 1 gets: 2 - -- task 2 gets: 3 - WHEN step.deps_count = 0 THEN - -- Root map (deps_count = 0): no dependencies, reads from run input. - -- Extract the element at task_index from the run's input array. - -- Note: If run input is not an array, this will return NULL - -- and the flow will fail (validated in start_flow). - jsonb_array_element(r.input, st.task_index) - - -- DEPENDENT MAP: Gets array from its single dependency - -- Example: dependency output = ["a", "b", "c"] - -- task 0 gets: "a" - -- task 1 gets: "b" - -- task 2 gets: "c" - ELSE - -- Has dependencies (should be exactly 1 for map steps). - -- Extract the element at task_index from the dependency's output array. - -- - -- Why the subquery with jsonb_each? 
- -- - The dependency outputs a raw array: [1, 2, 3] - -- - deps_outputs aggregates it into: {"dep_name": [1, 2, 3]} - -- - We need to unwrap and get just the array value - -- - Map steps have exactly 1 dependency (enforced by add_step) - -- - So jsonb_each will return exactly 1 row - -- - We extract the 'value' which is the raw array [1, 2, 3] - -- - Then get the element at task_index from that array - (SELECT jsonb_array_element(value, st.task_index) - FROM jsonb_each(dep_out.deps_output) - LIMIT 1) - END - - -- -------------------- NON-MAP STEPS -------------------- - -- Regular (non-map) steps receive ALL inputs as a structured object. - -- This includes the original run input plus all dependency outputs. - ELSE - -- Non-map steps get structured input with named keys - -- Example output: { - -- "run": {"original": "input"}, - -- "step1": {"output": "from_step1"}, - -- "step2": {"output": "from_step2"} - -- } - -- - -- Build object with 'run' key containing original input - jsonb_build_object('run', r.input) || - -- Merge with deps_output which already has dependency outputs - -- deps_output format: {"dep1": output1, "dep2": output2, ...} - -- If no dependencies, defaults to empty object - coalesce(dep_out.deps_output, '{}'::jsonb) - END as input, - st.message_id as msg_id, - st.task_index as task_index - from tasks st - join runs r on st.run_id = r.run_id - join pgflow.steps step on - step.flow_slug = st.flow_slug and - step.step_slug = st.step_slug - left join deps_outputs dep_out on - dep_out.run_id = st.run_id and - dep_out.step_slug = st.step_slug -$$; --- Create "add_step" function -CREATE FUNCTION "pgflow"."add_step" ("flow_slug" text, "step_slug" text, "deps_slugs" text[] DEFAULT '{}', "max_attempts" integer DEFAULT NULL::integer, "base_delay" integer DEFAULT NULL::integer, "timeout" integer DEFAULT NULL::integer, "start_delay" integer DEFAULT NULL::integer, "step_type" text DEFAULT 'single') RETURNS "pgflow"."steps" LANGUAGE plpgsql SET "search_path" = '' AS $$ -DECLARE - result_step pgflow.steps; - next_idx int; -BEGIN - -- Validate map step constraints - -- Map steps can have either: - -- 0 dependencies (root map - maps over flow input array) - -- 1 dependency (dependent map - maps over dependency output array) - IF COALESCE(add_step.step_type, 'single') = 'map' AND COALESCE(array_length(add_step.deps_slugs, 1), 0) > 1 THEN - RAISE EXCEPTION 'Map step "%" can have at most one dependency, but % were provided: %', - add_step.step_slug, - COALESCE(array_length(add_step.deps_slugs, 1), 0), - array_to_string(add_step.deps_slugs, ', '); - END IF; - - -- Get next step index - SELECT COALESCE(MAX(s.step_index) + 1, 0) INTO next_idx - FROM pgflow.steps s - WHERE s.flow_slug = add_step.flow_slug; - - -- Create the step - INSERT INTO pgflow.steps ( - flow_slug, step_slug, step_type, step_index, deps_count, - opt_max_attempts, opt_base_delay, opt_timeout, opt_start_delay - ) - VALUES ( - add_step.flow_slug, - add_step.step_slug, - COALESCE(add_step.step_type, 'single'), - next_idx, - COALESCE(array_length(add_step.deps_slugs, 1), 0), - add_step.max_attempts, - add_step.base_delay, - add_step.timeout, - add_step.start_delay - ) - ON CONFLICT ON CONSTRAINT steps_pkey - DO UPDATE SET step_slug = EXCLUDED.step_slug - RETURNING * INTO result_step; - - -- Insert dependencies - INSERT INTO pgflow.deps (flow_slug, dep_slug, step_slug) - SELECT add_step.flow_slug, d.dep_slug, add_step.step_slug - FROM unnest(COALESCE(add_step.deps_slugs, '{}')) AS d(dep_slug) - WHERE add_step.deps_slugs IS NOT 
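-- Illustrative sketch, not part of the deleted migration: the two input shapes the
-- CASE above constructs (all values hypothetical). A map task receives one raw element:
SELECT jsonb_array_element('[1, 2, 3]'::jsonb, 1);   -- task_index 1 => 2
-- A non-map step receives the run input plus named dependency outputs, merged:
SELECT jsonb_build_object('run', '{"id": 7}'::jsonb) || '{"fetch": {"ok": true}}'::jsonb;
-- => {"run": {"id": 7}, "fetch": {"ok": true}}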
NULL AND array_length(add_step.deps_slugs, 1) > 0 - ON CONFLICT ON CONSTRAINT deps_pkey DO NOTHING; - - RETURN result_step; -END; -$$; --- Drop "add_step" function -DROP FUNCTION "pgflow"."add_step" (text, text, integer, integer, integer, integer); --- Drop "add_step" function -DROP FUNCTION "pgflow"."add_step" (text, text, text[], integer, integer, integer, integer); diff --git a/examples/playground/tailwind.config.ts b/examples/playground/tailwind.config.ts deleted file mode 100644 index 41668a3b1..000000000 --- a/examples/playground/tailwind.config.ts +++ /dev/null @@ -1,80 +0,0 @@ -import type { Config } from "tailwindcss"; - -const config = { - darkMode: ["class"], - content: [ - "./pages/**/*.{ts,tsx}", - "./components/**/*.{ts,tsx}", - "./app/**/*.{ts,tsx}", - "./src/**/*.{ts,tsx}", - ], - prefix: "", - theme: { - container: { - center: true, - padding: "2rem", - screens: { - "2xl": "1400px", - }, - }, - extend: { - colors: { - border: "hsl(var(--border))", - input: "hsl(var(--input))", - ring: "hsl(var(--ring))", - background: "hsl(var(--background))", - foreground: "hsl(var(--foreground))", - primary: { - DEFAULT: "hsl(var(--primary))", - foreground: "hsl(var(--primary-foreground))", - }, - secondary: { - DEFAULT: "hsl(var(--secondary))", - foreground: "hsl(var(--secondary-foreground))", - }, - destructive: { - DEFAULT: "hsl(var(--destructive))", - foreground: "hsl(var(--destructive-foreground))", - }, - muted: { - DEFAULT: "hsl(var(--muted))", - foreground: "hsl(var(--muted-foreground))", - }, - accent: { - DEFAULT: "hsl(var(--accent))", - foreground: "hsl(var(--accent-foreground))", - }, - popover: { - DEFAULT: "hsl(var(--popover))", - foreground: "hsl(var(--popover-foreground))", - }, - card: { - DEFAULT: "hsl(var(--card))", - foreground: "hsl(var(--card-foreground))", - }, - }, - borderRadius: { - lg: "var(--radius)", - md: "calc(var(--radius) - 2px)", - sm: "calc(var(--radius) - 4px)", - }, - keyframes: { - "accordion-down": { - from: { height: "0" }, - to: { height: "var(--radix-accordion-content-height)" }, - }, - "accordion-up": { - from: { height: "var(--radix-accordion-content-height)" }, - to: { height: "0" }, - }, - }, - animation: { - "accordion-down": "accordion-down 0.2s ease-out", - "accordion-up": "accordion-up 0.2s ease-out", - }, - }, - }, - plugins: [require("tailwindcss-animate")], -} satisfies Config; - -export default config; diff --git a/examples/playground/tsconfig.json b/examples/playground/tsconfig.json deleted file mode 100644 index a3ea03902..000000000 --- a/examples/playground/tsconfig.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "extends": "../../tsconfig.base.json", - "compilerOptions": { - "target": "es5", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "node", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true, - "plugins": [ - { - "name": "next" - } - ], - "baseUrl": ".", - "paths": { - "@/*": ["./*"] - }, - "typeRoots": ["./types", "./node_modules/@types"] - }, - "include": [ - "**/*.ts", - "**/*.tsx", - ".next/types/**/*.ts", - "next-env.d.ts", - "../../examples/playground/.next/types/**/*.ts" - ], - "exclude": [ - "node_modules", - "supabase/functions/**/*.ts", - "supabase/functions/**/*.d.ts" - ], - "references": [ - { - "path": "../../pkgs/dsl" - }, - { - "path": "../../pkgs/client" - } - ] -} diff --git 
a/examples/playground/types/deno/index.d.ts b/examples/playground/types/deno/index.d.ts
deleted file mode 100644
index 5a7b11d3b..000000000
--- a/examples/playground/types/deno/index.d.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-// Minimal Deno runtime stub – enough for @supabase/supabase-js to type-check
-declare namespace Deno {
-  // eslint-disable-next-line @typescript-eslint/no-empty-interface, @typescript-eslint/no-empty-object-type
-  interface Reader {}
-  interface Env {
-    get(key: string): string | undefined;
-  }
-  const env: Env;
-}
-
-export {}; // makes this a module and avoids global pollution
diff --git a/examples/playground/utils/supabase/check-env-vars.ts b/examples/playground/utils/supabase/check-env-vars.ts
deleted file mode 100644
index 7180f45b0..000000000
--- a/examples/playground/utils/supabase/check-env-vars.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-// This check can be removed
-// it is just for tutorial purposes
-
-export const hasEnvVars =
-  process.env.NEXT_PUBLIC_SUPABASE_URL &&
-  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY;
diff --git a/examples/playground/utils/supabase/client.ts b/examples/playground/utils/supabase/client.ts
deleted file mode 100644
index f38f0fa5e..000000000
--- a/examples/playground/utils/supabase/client.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { createBrowserClient } from '@supabase/ssr';
-import { Database } from '@/supabase/functions/database-types';
-
-export const createClient = () =>
-  createBrowserClient(
-    process.env.NEXT_PUBLIC_SUPABASE_URL!,
-    process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
-  );
diff --git a/examples/playground/utils/supabase/middleware.ts b/examples/playground/utils/supabase/middleware.ts
deleted file mode 100644
index 9cf629af2..000000000
--- a/examples/playground/utils/supabase/middleware.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import { createServerClient } from "@supabase/ssr";
-import { type NextRequest, NextResponse } from "next/server";
-
-export const updateSession = async (request: NextRequest) => {
-  // This `try/catch` block is only here for the interactive tutorial.
-  // Feel free to remove once you have Supabase connected.
-  try {
-    // Create an unmodified response
-    let response = NextResponse.next({
-      request: {
-        headers: request.headers,
-      },
-    });
-
-    const supabase = createServerClient(
-      process.env.NEXT_PUBLIC_SUPABASE_URL!,
-      process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
-      {
-        cookies: {
-          getAll() {
-            return request.cookies.getAll();
-          },
-          setAll(cookiesToSet) {
-            cookiesToSet.forEach(({ name, value }) =>
-              request.cookies.set(name, value),
-            );
-            response = NextResponse.next({
-              request,
-            });
-            cookiesToSet.forEach(({ name, value, options }) =>
-              response.cookies.set(name, value, options),
-            );
-          },
-        },
-      },
-    );
-
-    // This will refresh session if expired - required for Server Components
-    // https://supabase.com/docs/guides/auth/server-side/nextjs
-    const user = await supabase.auth.getUser();
-
-    // protected routes
-    if ((request.nextUrl.pathname.startsWith("/protected") ||
-      request.nextUrl.pathname.startsWith("/websites/runs")) &&
-      user.error) {
-      return NextResponse.redirect(new URL("/sign-in", request.url));
-    }
-
-    // Allow access to the websites page for logged-in users only
-    if (request.nextUrl.pathname === "/websites" && user.error) {
-      return NextResponse.redirect(new URL("/sign-in", request.url));
-    }
-
-    return response;
-  } catch (e) {
-    // If you are here, a Supabase client could not be created!
-    // This is likely because you have not set up environment variables.
-    // Check out http://localhost:3000 for Next Steps.
-    return NextResponse.next({
-      request: {
-        headers: request.headers,
-      },
-    });
-  }
-};
diff --git a/examples/playground/utils/supabase/server.ts b/examples/playground/utils/supabase/server.ts
deleted file mode 100644
index 2c00bbc96..000000000
--- a/examples/playground/utils/supabase/server.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { createServerClient } from "@supabase/ssr";
-import { cookies } from "next/headers";
-
-export const createClient = async () => {
-  const cookieStore = await cookies();
-
-  return createServerClient(
-    process.env.NEXT_PUBLIC_SUPABASE_URL!,
-    process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
-    {
-      cookies: {
-        getAll() {
-          return cookieStore.getAll();
-        },
-        setAll(cookiesToSet) {
-          try {
-            cookiesToSet.forEach(({ name, value, options }) => {
-              cookieStore.set(name, value, options);
-            });
-          } catch (error) {
-            // The `set` method was called from a Server Component.
-            // This can be ignored if you have middleware refreshing
-            // user sessions.
-          }
-        },
-      },
-    },
-  );
-};
diff --git a/examples/playground/utils/utils.ts b/examples/playground/utils/utils.ts
deleted file mode 100644
index c9fbbe892..000000000
--- a/examples/playground/utils/utils.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { redirect } from "next/navigation";
-
-/**
- * Redirects to a specified path with an encoded message as a query parameter.
- * @param {('error' | 'success')} type - The type of message, either 'error' or 'success'.
- * @param {string} path - The path to redirect to.
- * @param {string} message - The message to be encoded and added as a query parameter.
- * @returns {never} This function doesn't return as it triggers a redirect.
- */
-export function encodedRedirect(
-  type: "error" | "success",
-  path: string,
-  message: string,
-) {
-  return redirect(`${path}?${type}=${encodeURIComponent(message)}`);
-}
diff --git a/netlify.toml b/netlify.toml
deleted file mode 100644
index 817dd971b..000000000
--- a/netlify.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[build]
-  base = "examples/playground"
-  command = "pnpm build"
-  publish = ".next"
-
-[[plugins]]
-  package = "@netlify/plugin-nextjs"
\ No newline at end of file
diff --git a/package.json b/package.json
index 379e6e8a5..3c08348fa 100644
--- a/package.json
+++ b/package.json
@@ -5,10 +5,10 @@
   "scripts": {
     "build": "nx run-many --target=build --all",
     "version": "pnpm changeset version && ./scripts/update-jsr-json-version.sh",
-    "validate:publish:npm": "pnpm nx run-many -t build --exclude=playground && pnpm publish --dry-run --recursive --filter=!./pkgs/edge-worker",
+    "validate:publish:npm": "pnpm nx run-many -t build && pnpm publish --dry-run --recursive --filter=!./pkgs/edge-worker",
     "validate:publish:jsr": "cd ./pkgs/edge-worker && jsr publish --dry-run --allow-slow-types",
     "validate:publish": "pnpm run validate:publish:npm && pnpm run validate:publish:jsr",
-    "publish:npm": "pnpm nx run-many -t build --exclude=playground && pnpm publish --recursive --filter=!./pkgs/edge-worker",
+    "publish:npm": "pnpm nx run-many -t build && pnpm publish --recursive --filter=!./pkgs/edge-worker",
     "publish:jsr": "cd ./pkgs/edge-worker && jsr publish --allow-slow-types",
     "changeset:tag": "pnpm changeset tag && git push --follow-tags",
     "release": "git status && pnpm run validate:publish && pnpm run publish:npm && pnpm run publish:jsr && pnpm run changeset:tag"
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d5f1f74fe..238b120f2 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -112,108 +112,6 @@ importers:
         specifier: ^1.3.1
         version: 1.3.1(@types/node@18.16.20)(@vitest/ui@1.6.1)(jsdom@22.1.0)(less@4.1.3)(sass-embedded@1.93.3)(sass@1.94.0)(stylus@0.64.0)(terser@5.43.1)

-  examples/playground:
-    dependencies:
-      '@pgflow/client':
-        specifier: workspace:*
-        version: link:../../pkgs/client
-      '@pgflow/dsl':
-        specifier: workspace:*
-        version: link:../../pkgs/dsl
-      '@radix-ui/react-checkbox':
-        specifier: ^1.1.1
-        version: 1.3.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-collapsible':
-        specifier: ^1.1.8
-        version: 1.1.12(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-dropdown-menu':
-        specifier: ^2.1.1
-        version: 2.1.16(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-label':
-        specifier: ^2.1.0
-        version: 2.1.8(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-slot':
-        specifier: ^1.1.0
-        version: 1.2.4(@types/react@19.1.9)(react@19.0.0)
-      '@supabase/ssr':
-        specifier: latest
-        version: 0.7.0(@supabase/supabase-js@2.49.4)
-      '@supabase/supabase-js':
-        specifier: 2.49.4
-        version: 2.49.4
-      '@types/react':
-        specifier: 19.1.9
-        version: 19.1.9
-      '@types/react-dom':
-        specifier: 19.0.2
-        version: 19.0.2(@types/react@19.1.9)
-      '@types/uuid':
-        specifier: 10.0.0
-        version: 10.0.0
-      autoprefixer:
-        specifier: 10.4.20
-        version: 10.4.20(postcss@8.4.49)
-      class-variance-authority:
-        specifier: ^0.7.0
-        version: 0.7.1
-      clsx:
-        specifier: ^2.1.1
-        version: 2.1.1
-      framer-motion:
-        specifier: ^12.9.2
-        version: 12.23.24(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      lucide-react:
-        specifier: ^0.468.0
-        version: 0.468.0(react@19.0.0)
-      nanoevents:
-        specifier: 7.0.1
-        version: 7.0.1
-      next:
-        specifier: 15.0.3
-        version: 15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.94.0)
-      next-plausible:
-        specifier: ^3.12.4
-        version: 3.12.5(next@15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.94.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      next-themes:
-        specifier: ^0.4.3
-        version: 0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      postcss:
-        specifier: 8.4.49
-        version: 8.4.49
-      postgres:
-        specifier: 3.4.5
-        version: 3.4.5
-      react:
-        specifier: 19.0.0
-        version: 19.0.0
-      react-dom:
-        specifier: 19.0.0
-        version: 19.0.0(react@19.0.0)
-      sharp:
-        specifier: 0.32.6
-        version: 0.32.6
-      supabase:
-        specifier: 2.21.1
-        version: 2.21.1
-      tailwind-merge:
-        specifier: 2.6.0
-        version: 2.6.0
-      tailwindcss:
-        specifier: 3.4.17
-        version: 3.4.17(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3))
-      tailwindcss-animate:
-        specifier: 1.0.7
-        version: 1.0.7(tailwindcss@3.4.17(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3)))
-      terser:
-        specifier: 5.43.1
-        version: 5.43.1
-      typescript:
-        specifier: 5.8.3
-        version: 5.8.3
-      uuid:
-        specifier: 9.0.1
-        version: 9.0.1
-
   pkgs/cli:
     dependencies:
       '@clack/prompts':
@@ -419,10 +317,6 @@ packages:
   '@adobe/css-tools@4.3.3':
     resolution: {integrity: sha512-rE0Pygv0sEZ4vBWHlAgJLGDU7Pm8xoO6p3wsEceb7GYAjScrOHpEo8KK/eVkAcnSM+slAEtXjA2JpdjLp4fJQQ==}

-  '@alloc/quick-lru@5.2.0':
-    resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==}
-    engines: {node: '>=10'}
-
   '@ampproject/remapping@2.3.0':
     resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
     engines: {node: '>=6.0.0'}
@@ -2522,21 +2416,6 @@ packages:
   '@fastify/static@7.0.4':
     resolution: {integrity: sha512-p2uKtaf8BMOZWLs6wu+Ihg7bWNBdjNgCwDza4MJtTqg+5ovKmcbgbR9Xs5/smZ1YISfzKOCNYmZV8LaCj+eJ1Q==}

-  '@floating-ui/core@1.7.3':
-    resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==}
-
-  '@floating-ui/dom@1.7.4':
-    resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==}
-
-  '@floating-ui/react-dom@2.1.6':
-    resolution: {integrity: sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==}
-    peerDependencies:
-      react: '>=16.8.0'
-      react-dom: '>=16.8.0'
-
-  '@floating-ui/utils@0.2.10':
-    resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==}
-
   '@henrygd/queue@1.1.1':
     resolution: {integrity: sha512-U6qjF4kIK7K72e74tmHyi2TWjSbwzKmnOlu9HOz1l6JFCeM9Sqe3znSoyK/kst+rie7Q8FvTqVxcBjG+Q7vJYQ==}

@@ -3895,342 +3774,6 @@ packages:
   '@poppinss/exception@1.2.2':
     resolution: {integrity: sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg==}

-  '@radix-ui/primitive@1.1.3':
-    resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==}
-
-  '@radix-ui/react-arrow@1.1.7':
-    resolution: {integrity: sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-checkbox@1.3.3':
-    resolution: {integrity: sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-collapsible@1.1.12':
-    resolution: {integrity: sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-collection@1.1.7':
-    resolution: {integrity: sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-compose-refs@1.1.2':
-    resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-context@1.1.2':
-    resolution: {integrity: sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-direction@1.1.1':
-    resolution: {integrity: sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-dismissable-layer@1.1.11':
-    resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-dropdown-menu@2.1.16':
-    resolution: {integrity: sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-focus-guards@1.1.3':
-    resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-focus-scope@1.1.7':
-    resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-id@1.1.1':
-    resolution: {integrity: sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-label@2.1.8':
-    resolution: {integrity: sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-menu@2.1.16':
-    resolution: {integrity: sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-popper@1.2.8':
-    resolution: {integrity: sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-portal@1.1.9':
-    resolution: {integrity: sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-presence@1.1.5':
-    resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-primitive@2.1.3':
-    resolution: {integrity: sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-primitive@2.1.4':
-    resolution: {integrity: sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-roving-focus@1.1.11':
-    resolution: {integrity: sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==}
-    peerDependencies:
-      '@types/react': '*'
-      '@types/react-dom': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-      '@types/react-dom':
-        optional: true
-
-  '@radix-ui/react-slot@1.2.3':
-    resolution: {integrity: sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-slot@1.2.4':
-    resolution: {integrity: sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-callback-ref@1.1.1':
-    resolution: {integrity: sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-controllable-state@1.2.2':
-    resolution: {integrity: sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-effect-event@0.0.2':
-    resolution: {integrity: sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-escape-keydown@1.1.1':
-    resolution: {integrity: sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-layout-effect@1.1.1':
-    resolution: {integrity: sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-previous@1.1.1':
-    resolution: {integrity: sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-rect@1.1.1':
-    resolution: {integrity: sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/react-use-size@1.1.1':
-    resolution: {integrity: sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@radix-ui/rect@1.1.1':
-    resolution: {integrity: sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==}
-
   '@rolldown/pluginutils@1.0.0-beta.27':
     resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==}
@@ -4529,53 +4072,26 @@ packages:
   '@speed-highlight/core@1.2.12':
     resolution: {integrity: sha512-uilwrK0Ygyri5dToHYdZSjcvpS2ZwX0w5aSt3GCEN9hrjxWCoeV4Z2DTXuxjwbntaLQIEEAlCeNQss5SoHvAEA==}

-  '@supabase/auth-js@2.69.1':
-    resolution: {integrity: sha512-FILtt5WjCNzmReeRLq5wRs3iShwmnWgBvxHfqapC/VoljJl+W8hDAyFmf1NVw3zH+ZjZ05AKxiKxVeb0HNWRMQ==}
-
   '@supabase/auth-js@2.81.0':
     resolution: {integrity: sha512-mWyRPO+XUo19MHNBFg5qdH8cMIyxRNj9HXhwkwToxDHYRZWru96hWZFCVb7trOrTpPVe4TgLer2yy3KMvYBMPw==}
     engines: {node: '>=20.0.0'}

-  '@supabase/functions-js@2.4.4':
-    resolution: {integrity: sha512-WL2p6r4AXNGwop7iwvul2BvOtuJ1YQy8EbOd0dhG1oN1q8el/BIRSFCFnWAMM/vJJlHWLi4ad22sKbKr9mvjoA==}
-
   '@supabase/functions-js@2.81.0':
     resolution: {integrity: sha512-yxxIGbXm1TtRpP5VwXKEZIdQMd2XUrWS1xt3zPF3jMItX5dXfdpbz5YRPY3IfebR8gXB113d/APWvYLiNuzI1Q==}
     engines: {node: '>=20.0.0'}

-  '@supabase/node-fetch@2.6.15':
-    resolution: {integrity: sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==}
-    engines: {node: 4.x || >=6.0.0}
-
-  '@supabase/postgrest-js@1.19.4':
-    resolution: {integrity: sha512-O4soKqKtZIW3olqmbXXbKugUtByD2jPa8kL2m2c1oozAO11uCcGrRhkZL0kVxjBLrXHE0mdSkFsMj7jDSfyNpw==}
-
   '@supabase/postgrest-js@2.81.0':
     resolution: {integrity: sha512-HdybTRf5Sy+gBxzgwkag+WkvV8QqMXhnKQ383YG51lCbm8p82CuCcUTzGy2xFHiA2ZXnnlkSzrfw8uKFAiAiog==}
     engines: {node: '>=20.0.0'}

-  '@supabase/realtime-js@2.11.2':
-    resolution: {integrity: sha512-u/XeuL2Y0QEhXSoIPZZwR6wMXgB+RQbJzG9VErA3VghVt7uRfSVsjeqd7m5GhX3JR6dM/WRmLbVR8URpDWG4+w==}
-
   '@supabase/realtime-js@2.81.0':
     resolution: {integrity: sha512-WCL9kMbmHQNGAG4ep+jfU22+h9OiQVv7bbkOmLy4gwlqtE+SJszkAtRp3l3xthqYkbxHbIqGc/BlHv3Dh79cXg==}
     engines: {node: '>=20.0.0'}

-  '@supabase/ssr@0.7.0':
-    resolution: {integrity: sha512-G65t5EhLSJ5c8hTCcXifSL9Q/ZRXvqgXeNo+d3P56f4U1IxwTqjB64UfmfixvmMcjuxnq2yGqEWVJqUcO+AzAg==}
-    peerDependencies:
-      '@supabase/supabase-js': ^2.43.4
-
-  '@supabase/storage-js@2.7.1':
-    resolution: {integrity: sha512-asYHcyDR1fKqrMpytAS1zjyEfvxuOIp1CIXX7ji4lHHcJKqyk+sLl/Vxgm4sN6u8zvuUtae9e4kDxQP2qrwWBA==}
-
   '@supabase/storage-js@2.81.0':
     resolution: {integrity: sha512-gj9u+EyEVLgDA9jW8JOsAgEc8H79zg01STK5KLv9EU45kf5Qh7kAoCmG090Jkp/YEGvSiaR/Ta7Xs/gUTLqflw==}
     engines: {node: '>=20.0.0'}

-  '@supabase/supabase-js@2.49.4':
-    resolution: {integrity: sha512-jUF0uRUmS8BKt37t01qaZ88H9yV1mbGYnqLeuFWLcdV+x1P4fl0yP9DGtaEhFPZcwSom7u16GkLEH9QJZOqOkw==}
-
   '@supabase/supabase-js@2.81.0':
     resolution: {integrity: sha512-FkiqUYCzsT92V/mfvoFueszkQrPqSTHgXhN9ADqeMpY5j0tUqeAZu8g2ptLYiDmx1pBbh4xoiqxWAf3UDIv4Bw==}
     engines: {node: '>=20.0.0'}
@@ -4838,19 +4354,11 @@ packages:
   '@types/range-parser@1.2.7':
     resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==}

-  '@types/react-dom@19.0.2':
-    resolution: {integrity: sha512-c1s+7TKFaDRRxr1TxccIX2u7sfCnc3RxkVyBIUA2lCpyqCF+QoAwQ/CBg7bsMdVwP120HEH143VQezKtef5nCg==}
-    peerDependencies:
-      '@types/react': ^19.0.0
-
   '@types/react-dom@19.2.2':
     resolution: {integrity: sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw==}
     peerDependencies:
       '@types/react': ^19.2.0

-  '@types/react@19.1.9':
-    resolution: {integrity: sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA==}
-
   '@types/react@19.2.2':
     resolution: {integrity: sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA==}
@@ -5440,9 +4948,6 @@ packages:
     resolution: {integrity: sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==}
     engines: {node: '>=14'}

-  any-promise@1.3.0:
-    resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==}
-
   anymatch@3.1.3:
     resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
     engines: {node: '>= 8'}
@@ -5467,10 +4972,6 @@ packages:
   argparse@2.0.1:
     resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}

-  aria-hidden@1.2.6:
-    resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==}
-    engines: {node: '>=10'}
-
   aria-query@5.3.2:
     resolution: {integrity: sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==}
     engines: {node: '>= 0.4'}
@@ -5857,10 +5358,6 @@ packages:
     resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
     engines: {node: '>=6'}

-  camelcase-css@2.0.1:
-    resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==}
-    engines: {node: '>= 6'}
-
   camelcase@5.3.1:
     resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==}
     engines: {node: '>=6'}
@@ -5964,9 +5461,6 @@ packages:
   cjs-module-lexer@1.4.3:
     resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==}

-  class-variance-authority@0.7.1:
-    resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==}
-
   clean-deep@3.4.0:
     resolution: {integrity: sha512-Lo78NV5ItJL/jl+B5w0BycAisaieJGXK1qYi/9m4SjR8zbqmrUtO7Yhro40wEShGmmxs/aJLI/A+jNhdkXK8mw==}
     engines: {node: '>=4'}
@@ -6117,10 +5611,6 @@ packages:
   commander@2.20.3:
     resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}

-  commander@4.1.1:
-    resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
-    engines: {node: '>= 6'}
-
   commander@7.2.0:
     resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==}
     engines: {node: '>= 10'}
@@ -6547,9 +6037,6 @@ packages:
     resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==}
     engines: {node: '>=8'}

-  detect-node-es@1.1.0:
-    resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==}
-
   detect-node@2.1.0:
     resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==}
@@ -6614,9 +6101,6 @@ packages:
   dfa@1.2.0:
     resolution: {integrity: sha512-ED3jP8saaweFTjeGX8HQPjeC1YYyZs98jGNZx6IiBvxW7JG5v492kamAQB3m2wop07CvU/RQmzcKr6bgcC5D/Q==}

-  didyoumean@1.2.2:
-    resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==}
-
   diff-sequences@29.6.3:
     resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==}
     engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@@ -7321,20 +6805,6 @@ packages:
   fraction.js@4.3.7:
     resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==}

-  framer-motion@12.23.24:
-    resolution: {integrity: sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w==}
-    peerDependencies:
-      '@emotion/is-prop-valid': '*'
-      react: ^18.0.0 || ^19.0.0
-      react-dom: ^18.0.0 || ^19.0.0
-    peerDependenciesMeta:
-      '@emotion/is-prop-valid':
-        optional: true
-      react:
-        optional: true
-      react-dom:
-        optional: true
-
   fresh@0.5.2:
     resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
     engines: {node: '>= 0.6'}
@@ -7413,10 +6883,6 @@ packages:
     resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
     engines: {node: '>= 0.4'}

-  get-nonce@1.0.1:
-    resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==}
-    engines: {node: '>=6'}
-
   get-package-name@2.2.0:
     resolution: {integrity: sha512-LmCKVxioe63Fy6KDAQ/mmCSOSSRUE/x4zdrMD+7dU8quF3bGpzvP8mOmq4Dgce3nzU9AgkVDotucNOOg7c27BQ==}
     engines: {node: '>= 12.0.0'}
@@ -8256,10 +7722,6 @@ packages:
     resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==}
     engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}

-  jiti@1.21.7:
-    resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==}
-    hasBin: true
-
   jiti@2.4.2:
     resolution: {integrity: sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==}
     hasBin: true
@@ -8625,11 +8087,6 @@ packages:
     resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
     engines: {node: '>=10'}

-  lucide-react@0.468.0:
-    resolution: {integrity: sha512-6koYRhnM2N0GGZIdXzSeiNwguv1gt/FAjZOiPl76roBi3xKEXa4WmfpxgQwTTL4KipXjefrnf3oV4IsYhi4JFA==}
-    peerDependencies:
-      react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc
-
   luxon@3.7.2:
     resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
     engines: {node: '>=12'}
@@ -9038,12 +8495,6 @@ packages:
   moize@6.1.6:
     resolution: {integrity: sha512-vSKdIUO61iCmTqhdoIDrqyrtp87nWZUmBPniNjO0fX49wEYmyDO4lvlnFXiGcaH1JLE/s/9HbiK4LSHsbiUY6Q==}

-  motion-dom@12.23.23:
-    resolution: {integrity: sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==}
-
-  motion-utils@12.23.6:
-    resolution: {integrity: sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==}
-
   move-file@3.1.0:
     resolution: {integrity: sha512-4aE3U7CCBWgrQlQDMq8da4woBWDGHioJFiOZ8Ie6Yq2uwYQ9V2kGhTz4x3u6Wc+OU17nw0yc3rJ/lQ4jIiPe3A==}
     engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
@@ -9082,9 +8533,6 @@ packages:
   mute-stream@0.0.8:
     resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==}

-  mz@2.7.0:
-    resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==}
-
   nan@2.23.1:
     resolution: {integrity: sha512-r7bBUGKzlqk8oPBDYxt6Z0aEdF1G1rwlMcLk8LCOMbOzf0mG+JUfUzG4fIMWwHWP0iyaLWEQZJmtB7nOHEm/qw==}
@@ -9134,19 +8582,6 @@ packages:
   netlify-redirector@0.5.0:
     resolution: {integrity: sha512-4zdzIP+6muqPCuE8avnrgDJ6KW/2+UpHTRcTbMXCIRxiRmyrX+IZ4WSJGZdHPWF3WmQpXpy603XxecZ9iygN7w==}

-  next-plausible@3.12.5:
-    resolution: {integrity: sha512-l1YMuTI9akb2u7z4hyTuxXpudy8KfSteRNXCYpWpnhAoBjaWQlv6sITai1TwcR7wWvVW8DFbLubvMQAsirAjcA==}
-    peerDependencies:
-      next: '^11.1.0 || ^12.0.0 || ^13.0.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 '
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
-      react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
-
-  next-themes@0.4.6:
-    resolution: {integrity: sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA==}
-    peerDependencies:
-      react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
-      react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
-
   next@15.0.3:
     resolution: {integrity: sha512-ontCbCRKJUIoivAdGB34yCaOcPgYXr9AAkV/IwqFfWWTXEPUgLYkSkqBhIk9KK7gGmgjc64B+RdoeIDM13Irnw==}
     engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
@@ -9323,14 +8758,6 @@ packages:
       '@swc/core':
         optional: true

-  object-assign@4.1.1:
-    resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
-    engines: {node: '>=0.10.0'}
-
-  object-hash@3.0.0:
-    resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==}
-    engines: {node: '>= 6'}
-
   object-inspect@1.13.4:
     resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==}
     engines: {node: '>= 0.4'}
@@ -9756,30 +9183,6 @@ packages:
     peerDependencies:
       postcss: ^8.0.0

-  postcss-import@15.1.0:
-    resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==}
-    engines: {node: '>=14.0.0'}
-    peerDependencies:
-      postcss: ^8.0.0
-
-  postcss-js@4.1.0:
-    resolution: {integrity: sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==}
-    engines: {node: ^12 || ^14 || >= 16}
-    peerDependencies:
-      postcss: ^8.4.21
-
-  postcss-load-config@4.0.2:
-    resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==}
-    engines: {node: '>= 14'}
-    peerDependencies:
-      postcss: '>=8.0.9'
-      ts-node: '>=9.0.0'
-    peerDependenciesMeta:
-      postcss:
-        optional: true
-      ts-node:
-        optional: true
-
   postcss-loader@6.2.1:
     resolution: {integrity: sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q==}
     engines: {node: '>= 12.13.0'}
@@ -10146,11 +9549,6 @@ packages:
     resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==}
     hasBin: true

-  react-dom@19.0.0:
-    resolution: {integrity: sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==}
-    peerDependencies:
-      react: ^19.0.0
-
   react-dom@19.2.0:
     resolution: {integrity: sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ==}
     peerDependencies:
@@ -10163,40 +9561,6 @@ packages:
     resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==}
     engines: {node: '>=0.10.0'}

-  react-remove-scroll-bar@2.3.8:
-    resolution: {integrity: sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  react-remove-scroll@2.7.1:
-    resolution: {integrity: sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  react-style-singleton@2.2.3:
-    resolution: {integrity: sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  react@19.0.0:
-    resolution: {integrity: sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==}
-    engines: {node: '>=0.10.0'}
-
   react@19.2.0:
     resolution: {integrity: sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==}
     engines: {node: '>=0.10.0'}
@@ -10664,9 +10028,6 @@ packages:
     resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==}
     engines: {node: '>=v12.22.7'}

-  scheduler@0.25.0:
-    resolution: {integrity: sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==}
-
   scheduler@0.27.0:
     resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==}
@@ -11162,11 +10523,6 @@ packages:
     engines: {node: '>=16'}
     hasBin: true

-  sucrase@3.35.0:
-    resolution: {integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==}
-    engines: {node: '>=16 || 14 >=14.17'}
-    hasBin: true
-
   suf-log@2.5.3:
     resolution: {integrity: sha512-KvC8OPjzdNOe+xQ4XWJV2whQA0aM1kGVczMQ8+dStAO6KfEB140JEVQ9dE76ONZ0/Ylf67ni4tILPJB41U0eow==}
@@ -11228,19 +10584,6 @@ packages:
     resolution: {integrity: sha512-ulAk51I9UVUyJgxlv9M6lFot2WP3e7t8Kz9+IS6D4rVba1tR9kON+Ey69f+1R4Q8cd45Lod6a4IcJIxnzGc/zA==}
     engines: {node: '>=18'}

-  tailwind-merge@2.6.0:
-    resolution: {integrity: sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA==}
-
-  tailwindcss-animate@1.0.7:
-    resolution: {integrity: sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==}
-    peerDependencies:
-      tailwindcss: '>=3.0.0 || insiders'
-
-  tailwindcss@3.4.17:
-    resolution: {integrity: sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==}
-    engines: {node: '>=14.0.0'}
-    hasBin: true
-
   tapable@2.3.0:
     resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==}
     engines: {node: '>=6'}
@@ -11312,13 +10655,6 @@ packages:
   text-hex@1.0.0:
     resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==}

-  thenify-all@1.6.0:
-    resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==}
-    engines: {node: '>=0.8'}
-
-  thenify@3.3.1:
-    resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==}
-
   thingies@2.5.0:
     resolution: {integrity: sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw==}
     engines: {node: '>=10.18'}
@@ -11443,9 +10779,6 @@ packages:
     peerDependencies:
       typescript: '>=4.8.4'

-  ts-interface-checker@0.1.13:
-    resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==}
-
   ts-loader@9.5.4:
     resolution: {integrity: sha512-nCz0rEwunlTZiy6rXFByQU1kVVpCIgUpc/psFiKVrUwrizdnIbRFu8w7bxhUF0X613DYwT4XzrZHpVyMe758hQ==}
     engines: {node: '>=12.0.0'}
@@ -11805,31 +11138,11 @@ packages:
   url-parse@1.5.10:
     resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==}

-  urlpattern-polyfill@10.1.0:
-    resolution: {integrity: sha512-IGjKp/o0NL3Bso1PymYURCJxMPNAf/ILOpendP9f5B6e1rTJgdgiOvgfoT8VxCAdY+Wisb9uhGaJJf3yZ2V9nw==}
-
-  urlpattern-polyfill@8.0.2:
-    resolution: {integrity: sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==}
-
-  use-callback-ref@1.3.3:
-    resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  use-sidecar@1.1.3:
-    resolution: {integrity: sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      '@types/react': '*'
-      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
+  urlpattern-polyfill@10.1.0:
+    resolution: {integrity: sha512-IGjKp/o0NL3Bso1PymYURCJxMPNAf/ILOpendP9f5B6e1rTJgdgiOvgfoT8VxCAdY+Wisb9uhGaJJf3yZ2V9nw==}
+
+  urlpattern-polyfill@8.0.2:
+    resolution: {integrity: sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==}

   util-deprecate@1.0.2:
     resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
@@ -12554,8 +11867,6 @@ snapshots:
   '@adobe/css-tools@4.3.3': {}

-  '@alloc/quick-lru@5.2.0': {}
-
   '@ampproject/remapping@2.3.0':
     dependencies:
      '@jridgewell/gen-mapping': 0.3.13
@@ -14561,23 +13872,6 @@ snapshots:
       fastq: 1.19.1
       glob: 10.4.5

-  '@floating-ui/core@1.7.3':
-    dependencies:
-      '@floating-ui/utils': 0.2.10
-
-  '@floating-ui/dom@1.7.4':
-    dependencies:
-      '@floating-ui/core': 1.7.3
-      '@floating-ui/utils': 0.2.10
-
-  '@floating-ui/react-dom@2.1.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@floating-ui/dom': 1.7.4
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-
-  '@floating-ui/utils@0.2.10': {}
-
   '@henrygd/queue@1.1.1': {}

   '@humanfs/core@0.19.1': {}
@@ -16745,309 +16039,6 @@ snapshots:
   '@poppinss/exception@1.2.2': {}

-  '@radix-ui/primitive@1.1.3': {}
-
-  '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-checkbox@1.3.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-previous': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-size': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-collapsible@1.1.12(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-id': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-collection@1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-slot': 1.2.3(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-context@1.1.2(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-direction@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-dropdown-menu@2.1.16(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-id': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-menu': 2.1.16(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-focus-guards@1.1.3(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-id@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-label@2.1.8(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-menu@2.1.16(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-direction': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-id': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-slot': 1.2.3(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      aria-hidden: 1.2.6
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-      react-remove-scroll: 2.7.1(@types/react@19.1.9)(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-popper@1.2.8(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@floating-ui/react-dom': 2.1.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-rect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-size': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/rect': 1.1.1
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-portal@1.1.9(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-presence@1.1.5(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-slot': 1.2.3(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-primitive@2.1.4(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-slot': 1.2.4(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)':
-    dependencies:
-      '@radix-ui/primitive': 1.1.3
-      '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-context': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-direction': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-id': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.0.2(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-      '@types/react-dom': 19.0.2(@types/react@19.1.9)
-
-  '@radix-ui/react-slot@1.2.3(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-slot@1.2.4(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.1.9)(react@19.0.0)
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-previous@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-rect@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/rect': 1.1.1
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/react-use-size@1.1.1(@types/react@19.1.9)(react@19.0.0)':
-    dependencies:
-      '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.9)(react@19.0.0)
-      react: 19.0.0
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  '@radix-ui/rect@1.1.1': {}
-
   '@rolldown/pluginutils@1.0.0-beta.27': {}

   '@rollup/pluginutils@5.3.0(rollup@4.53.2)':
@@ -17317,44 +16308,18 @@ snapshots:
   '@speed-highlight/core@1.2.12': {}

-  '@supabase/auth-js@2.69.1':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-
   '@supabase/auth-js@2.81.0':
     dependencies:
       tslib: 2.8.1

-  '@supabase/functions-js@2.4.4':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-
   '@supabase/functions-js@2.81.0':
     dependencies:
       tslib: 2.8.1

-  '@supabase/node-fetch@2.6.15':
-    dependencies:
-      whatwg-url: 5.0.0
-
-  '@supabase/postgrest-js@1.19.4':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-
   '@supabase/postgrest-js@2.81.0':
     dependencies:
       tslib: 2.8.1

-  '@supabase/realtime-js@2.11.2':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      '@types/phoenix': 1.6.6
-      '@types/ws': 8.18.1
-      ws: 8.18.3
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
   '@supabase/realtime-js@2.81.0':
     dependencies:
       '@types/phoenix': 1.6.6
@@ -17365,31 +16330,10 @@
       - bufferutil
       - utf-8-validate

-  '@supabase/ssr@0.7.0(@supabase/supabase-js@2.49.4)':
-    dependencies:
-      '@supabase/supabase-js': 2.49.4
-      cookie: 1.0.2
-
-  '@supabase/storage-js@2.7.1':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-
   '@supabase/storage-js@2.81.0':
     dependencies:
       tslib: 2.8.1

-  '@supabase/supabase-js@2.49.4':
-    dependencies:
-      '@supabase/auth-js': 2.69.1
-      '@supabase/functions-js': 2.4.4
-      '@supabase/node-fetch': 2.6.15
-      '@supabase/postgrest-js': 1.19.4
-      '@supabase/realtime-js': 2.11.2
-      '@supabase/storage-js': 2.7.1
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
   '@supabase/supabase-js@2.81.0':
     dependencies:
       '@supabase/auth-js': 2.81.0
@@ -17688,18 +16632,10 @@ snapshots:
   '@types/range-parser@1.2.7': {}

-  '@types/react-dom@19.0.2(@types/react@19.1.9)':
-    dependencies:
-      '@types/react': 19.1.9
-
   '@types/react-dom@19.2.2(@types/react@19.2.2)':
     dependencies:
       '@types/react': 19.2.2

-  '@types/react@19.1.9':
-    dependencies:
-      csstype: 3.1.3
-
   '@types/react@19.2.2':
     dependencies:
       csstype: 3.1.3
@@ -18494,8 +17430,6 @@ snapshots:
   ansis@4.2.0: {}

-  any-promise@1.3.0: {}
-
   anymatch@3.1.3:
     dependencies:
       normalize-path: 3.0.0
@@ -18534,10 +17468,6 @@ snapshots:
   argparse@2.0.1: {}

-  aria-hidden@1.2.6:
-    dependencies:
-      tslib: 2.8.1
-
   aria-query@5.3.2: {}

   array-flatten@1.1.1: {}
@@ -19078,8 +18008,6 @@ snapshots:
   callsites@3.1.0: {}

-  camelcase-css@2.0.1: {}
-
   camelcase@5.3.1: {}

   camelcase@6.3.0: {}
@@ -19173,10 +18101,6 @@ snapshots:
   cjs-module-lexer@1.4.3: {}

-  class-variance-authority@0.7.1:
-    dependencies:
-      clsx: 2.1.1
-
   clean-deep@3.4.0:
     dependencies:
       lodash.isempty: 4.4.0
@@ -19299,8 +18223,6 @@ snapshots:
   commander@2.20.3: {}

-  commander@4.1.1: {}
-
   commander@7.2.0: {}

   commander@9.5.0: {}
@@ -19704,8 +18626,6 @@ snapshots:
   detect-newline@3.1.0: {}

-  detect-node-es@1.1.0: {}
-
   detect-node@2.1.0: {}

   detect-port@1.6.1:
@@ -19783,8 +18703,6 @@ snapshots:
   dfa@1.2.0: {}

-  didyoumean@1.2.2: {}
-
   diff-sequences@29.6.3: {}

   diff@4.0.2: {}
@@ -20738,15 +19656,6 @@ snapshots:
   fraction.js@4.3.7: {}

-  framer-motion@12.23.24(react-dom@19.0.0(react@19.0.0))(react@19.0.0):
-    dependencies:
-      motion-dom: 12.23.23
-      motion-utils: 12.23.6
-      tslib: 2.8.1
-    optionalDependencies:
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-
   fresh@0.5.2: {}

   from2@2.3.0:
@@ -20830,8 +19739,6 @@ snapshots:
       hasown: 2.0.2
       math-intrinsics: 1.1.0

-  get-nonce@1.0.1: {}
-
   get-package-name@2.2.0: {}

   get-package-type@0.1.0: {}
@@ -22030,8 +20937,6 @@ snapshots:
       merge-stream: 2.0.0
       supports-color: 8.1.1

-  jiti@1.21.7: {}
-
   jiti@2.4.2: {}

   jju@1.4.0: {}
@@ -22467,10 +21372,6 @@ snapshots:
     dependencies:
       yallist: 4.0.0

-  lucide-react@0.468.0(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-
   luxon@3.7.2: {}

   macos-release@3.4.0: {}
@@ -23160,12 +22061,6 @@ snapshots:
       fast-equals: 3.0.3
       micro-memoize: 4.2.0

-  motion-dom@12.23.23:
-    dependencies:
-      motion-utils: 12.23.6
-
-  motion-utils@12.23.6: {}
-
   move-file@3.1.0:
     dependencies:
       path-exists: 5.0.0
@@ -23197,12 +22092,6 @@ snapshots:
   mute-stream@0.0.8: {}

-  mz@2.7.0:
-    dependencies:
-      any-promise: 1.3.0
-      object-assign: 4.1.1
-      thenify-all: 1.6.0
-
   nan@2.23.1:
     optional: true
@@ -23363,44 +22252,6 @@ snapshots:
   netlify-redirector@0.5.0: {}

-  next-plausible@3.12.5(next@15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.94.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0):
-    dependencies:
-      next: 15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.94.0)
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-
-  next-themes@0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-
-  next@15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.94.0):
-    dependencies:
-      '@next/env': 15.0.3
-      '@swc/counter': 0.1.3
-      '@swc/helpers': 0.5.13
-      busboy: 1.6.0
-      caniuse-lite: 1.0.30001754
-      postcss: 8.4.31
-      react: 19.0.0
-      react-dom: 19.0.0(react@19.0.0)
-      styled-jsx: 5.1.6(@babel/core@7.28.5)(babel-plugin-macros@3.1.0)(react@19.0.0)
-    optionalDependencies:
-      '@next/swc-darwin-arm64': 15.0.3
-      '@next/swc-darwin-x64': 15.0.3
-      '@next/swc-linux-arm64-gnu': 15.0.3
-      '@next/swc-linux-arm64-musl': 15.0.3
-      '@next/swc-linux-x64-gnu': 15.0.3
-      '@next/swc-linux-x64-musl': 15.0.3
-      '@next/swc-win32-arm64-msvc': 15.0.3
-      '@next/swc-win32-x64-msvc': 15.0.3
-      '@opentelemetry/api': 1.8.0
-      sass: 1.94.0
-      sharp: 0.33.5
-    transitivePeerDependencies:
-      - '@babel/core'
-      - babel-plugin-macros
-
   next@15.0.3(@babel/core@7.28.5)(@opentelemetry/api@1.8.0)(babel-plugin-macros@3.1.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(sass@1.94.0):
     dependencies:
       '@next/env': 15.0.3
@@ -23644,10 +22495,6 @@ snapshots:
     transitivePeerDependencies:
       - debug

-  object-assign@4.1.1: {}
-
-  object-hash@3.0.0: {}
-
   object-inspect@1.13.4: {}

   obuf@1.1.2: {}
@@ -24076,26 +22923,6 @@ snapshots:
       read-cache: 1.0.0
       resolve: 1.22.11

-  postcss-import@15.1.0(postcss@8.4.49):
-    dependencies:
-      postcss: 8.4.49
-      postcss-value-parser: 4.2.0
-      read-cache: 1.0.0
-      resolve: 1.22.11
-
-  postcss-js@4.1.0(postcss@8.4.49):
-    dependencies:
-      camelcase-css: 2.0.1
-      postcss: 8.4.49
-
-  postcss-load-config@4.0.2(postcss@8.4.49)(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3)):
-    dependencies:
-      lilconfig: 3.1.3
-      yaml: 2.8.1
-    optionalDependencies:
-      postcss: 8.4.49
-      ts-node: 10.9.2(@types/node@18.16.20)(typescript@5.8.3)
-
   postcss-loader@6.2.1(postcss@8.4.49)(webpack@5.99.9(esbuild@0.19.12)):
     dependencies:
       cosmiconfig: 7.1.0
@@ -24458,11 +23285,6 @@ snapshots:
       minimist: 1.2.8
       strip-json-comments: 2.0.1

-  react-dom@19.0.0(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-      scheduler: 0.25.0
-
   react-dom@19.2.0(react@19.2.0):
     dependencies:
       react: 19.2.0
@@ -24472,35 +23294,6 @@ snapshots:
   react-refresh@0.17.0: {}

-  react-remove-scroll-bar@2.3.8(@types/react@19.1.9)(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-      react-style-singleton: 2.2.3(@types/react@19.1.9)(react@19.0.0)
-      tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  react-remove-scroll@2.7.1(@types/react@19.1.9)(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-      react-remove-scroll-bar: 2.3.8(@types/react@19.1.9)(react@19.0.0)
-      react-style-singleton: 2.2.3(@types/react@19.1.9)(react@19.0.0)
-      tslib: 2.8.1
-      use-callback-ref: 1.3.3(@types/react@19.1.9)(react@19.0.0)
-      use-sidecar: 1.1.3(@types/react@19.1.9)(react@19.0.0)
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  react-style-singleton@2.2.3(@types/react@19.1.9)(react@19.0.0):
-    dependencies:
-      get-nonce: 1.0.1
-      react: 19.0.0
-      tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  react@19.0.0: {}
-
   react@19.2.0: {}

   read-cache@1.0.0:
@@ -25049,8 +23842,6 @@ snapshots:
     dependencies:
       xmlchars: 2.2.0

-  scheduler@0.25.0: {}
-
   scheduler@0.27.0: {}

   schema-utils@3.3.0:
@@ -25643,14 +24434,6 @@ snapshots:
     dependencies:
       inline-style-parser: 0.2.6

-  styled-jsx@5.1.6(@babel/core@7.28.5)(babel-plugin-macros@3.1.0)(react@19.0.0):
-    dependencies:
-      client-only: 0.0.1
-      react: 19.0.0
-    optionalDependencies:
-      '@babel/core': 7.28.5
-      babel-plugin-macros: 3.1.0
-
   styled-jsx@5.1.6(@babel/core@7.28.5)(babel-plugin-macros@3.1.0)(react@19.2.0):
     dependencies:
       client-only: 0.0.1
@@ -25682,16 +24465,6 @@ snapshots:
     transitivePeerDependencies:
       - supports-color

-  sucrase@3.35.0:
-    dependencies:
-      '@jridgewell/gen-mapping': 0.3.13
-      commander: 4.1.1
-      glob: 10.4.5
-      lines-and-columns: 1.2.4
-      mz: 2.7.0
-      pirates: 4.0.7
-      ts-interface-checker: 0.1.13
-
   suf-log@2.5.3:
     dependencies:
       s.color: 0.0.15
@@ -25763,39 +24536,6 @@ snapshots:
   system-architecture@0.1.0: {}

-  tailwind-merge@2.6.0: {}
-
-  tailwindcss-animate@1.0.7(tailwindcss@3.4.17(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3))):
-    dependencies:
-      tailwindcss: 3.4.17(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3))
-
-  tailwindcss@3.4.17(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3)):
-    dependencies:
-      '@alloc/quick-lru': 5.2.0
-      arg: 5.0.2
-      chokidar: 3.6.0
-      didyoumean: 1.2.2
-      dlv: 1.1.3
-      fast-glob: 3.3.3
-      glob-parent: 6.0.2
-      is-glob: 4.0.3
-      jiti: 1.21.7
-      lilconfig: 3.1.3
-      micromatch: 4.0.8
-      normalize-path: 3.0.0
-      object-hash: 3.0.0
-      picocolors: 1.1.1
-      postcss: 8.4.49
-      postcss-import: 15.1.0(postcss@8.4.49)
-      postcss-js: 4.1.0(postcss@8.4.49)
-      postcss-load-config: 4.0.2(postcss@8.4.49)(ts-node@10.9.2(@types/node@18.16.20)(typescript@5.8.3))
-      postcss-nested: 6.2.0(postcss@8.4.49)
-      postcss-selector-parser: 6.1.2
-      resolve: 1.22.11
-      sucrase: 3.35.0
-    transitivePeerDependencies:
-      - ts-node
-
   tapable@2.3.0: {}

   tar-fs@2.1.4:
@@ -25916,14 +24656,6 @@ snapshots:
   text-hex@1.0.0: {}

-  thenify-all@1.6.0:
-    dependencies:
-      thenify: 3.3.1
-
-  thenify@3.3.1:
-    dependencies:
-      any-promise: 1.3.0
-
   thingies@2.5.0(tslib@2.8.1):
     dependencies:
       tslib: 2.8.1
@@ -26022,8 +24754,6 @@ snapshots:
     dependencies:
       typescript: 5.8.3

-  ts-interface-checker@0.1.13: {}
-
   ts-loader@9.5.4(typescript@5.8.3)(webpack@5.99.9(esbuild@0.19.12)):
     dependencies:
       chalk: 4.1.2
@@ -26359,21 +25089,6 @@ snapshots:
   urlpattern-polyfill@8.0.2: {}

-  use-callback-ref@1.3.3(@types/react@19.1.9)(react@19.0.0):
-    dependencies:
-      react: 19.0.0
-      tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 19.1.9
-
-  use-sidecar@1.1.3(@types/react@19.1.9)(react@19.0.0):
-    dependencies:
-      detect-node-es: 1.1.0
-      react: 19.0.0
-      tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 19.1.9
-
   util-deprecate@1.0.2: {}

   utils-merge@1.0.1: {}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index 989ee0e6a..1001471b9 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -1,6 +1,5 @@
 packages:
   - 'pkgs/*'
-  - 'examples/playground'
   - '!pkgs/*/dist'
   - '!**/dist/**'

diff --git a/scripts/snapshot-release.sh b/scripts/snapshot-release.sh
index 5178f9235..354ad4262 100755
--- a/scripts/snapshot-release.sh
+++ b/scripts/snapshot-release.sh
@@ -249,7 +249,7 @@ echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━
 echo -e "${BOLD}Building packages...${NC}"
 echo ""

-if pnpm nx run-many -t build --exclude=playground ; then
+if pnpm nx run-many -t build ; then
   echo -e "${GREEN}✓ Packages built successfully${NC}"
 else
   echo -e "${RED}✗ Build failed${NC}"
diff --git a/tsconfig.json b/tsconfig.json
index 9d3ff9e25..1015cbfe8 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -20,9 +20,6 @@
     },
     {
       "path": "./pkgs/client"
-    },
-    {
-      "path": "./examples/playground"
     }
   ]
 }