diff --git a/.changeset/afraid-sheep-joke.md b/.changeset/afraid-sheep-joke.md new file mode 100644 index 0000000000..f9ce48895c --- /dev/null +++ b/.changeset/afraid-sheep-joke.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Fixes an issue with scoped packages in additionalPackages option diff --git a/.changeset/blue-pumas-whisper.md b/.changeset/blue-pumas-whisper.md new file mode 100644 index 0000000000..9bfa259b8b --- /dev/null +++ b/.changeset/blue-pumas-whisper.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/shopify": patch +--- + +improved error messages when a shopify webhook fails to register diff --git a/.changeset/cool-comics-burn.md b/.changeset/cool-comics-burn.md new file mode 100644 index 0000000000..eaaf10cea9 --- /dev/null +++ b/.changeset/cool-comics-burn.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Fixing missing logs when importing client @opentelemetry/api diff --git a/.changeset/famous-boats-tease.md b/.changeset/famous-boats-tease.md new file mode 100644 index 0000000000..f8f7b681bc --- /dev/null +++ b/.changeset/famous-boats-tease.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Better handle issues with resolving dependency versions during deploy diff --git a/.changeset/hot-fishes-retire.md b/.changeset/hot-fishes-retire.md new file mode 100644 index 0000000000..778a5d5d44 --- /dev/null +++ b/.changeset/hot-fishes-retire.md @@ -0,0 +1,5 @@ +--- +trigger.dev: patch +--- + +Fix TypeScript inclusion in tsconfig.json for `cli-v3 init` diff --git a/.changeset/lazy-files-lay.md b/.changeset/lazy-files-lay.md new file mode 100644 index 0000000000..0cfba8cc56 --- /dev/null +++ b/.changeset/lazy-files-lay.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/core": patch +--- + +Increased the timeout when canceling a checkpoint to 31s (to match the timeout on the server) diff --git a/.changeset/nice-bulldogs-turn.md b/.changeset/nice-bulldogs-turn.md new file mode 100644 index 0000000000..f58877e8a0 --- /dev/null +++ b/.changeset/nice-bulldogs-turn.md 
@@ -0,0 +1,6 @@ +--- +"trigger.dev": patch +"@trigger.dev/core": patch +--- + +Better handle uncaught exceptions diff --git a/.changeset/pre.json b/.changeset/pre.json index 8310ccd9bf..dbb68a0c66 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -44,9 +44,11 @@ "@trigger.dev/yalt": "2.3.18" }, "changesets": [ + "afraid-sheep-joke", "angry-eagles-trade", "beige-pens-dance", "big-tomatoes-deliver", + "blue-pumas-whisper", "breezy-gorillas-mate", "chilled-hornets-move", "clean-pianos-listen", @@ -59,6 +61,7 @@ "dull-mangos-press", "eight-pumas-float", "eleven-paws-join", + "famous-boats-tease", "few-students-share", "funny-swans-destroy", "gorgeous-gorillas-compete", @@ -67,6 +70,7 @@ "khaki-poems-lay", "late-icons-lie", "late-steaks-behave", + "lazy-files-lay", "lemon-jobs-repair", "light-bulldogs-press", "light-dragons-complain", @@ -79,6 +83,7 @@ "mighty-flowers-train", "nasty-jars-pump", "new-rivers-tell", + "nice-bulldogs-turn", "ninety-pets-travel", "odd-poets-own", "pink-pumas-rhyme", @@ -113,8 +118,10 @@ "tender-moose-tell", "tender-oranges-rhyme", "thin-parents-heal", + "thirty-islands-kiss", "tidy-balloons-suffer", "tidy-dryers-sleep", + "tidy-tomatoes-explain", "tiny-doors-type", "tiny-elephants-scream", "tricky-bulldogs-heal", diff --git a/.changeset/strange-sheep-pull.md b/.changeset/strange-sheep-pull.md new file mode 100644 index 0000000000..518a7949b3 --- /dev/null +++ b/.changeset/strange-sheep-pull.md @@ -0,0 +1,6 @@ +--- +"@trigger.dev/sdk": patch +"@trigger.dev/core": patch +--- + +v2: Better handle recovering from platform communication errors by auto-yielding back to the platform in case of temporary API failures diff --git a/.changeset/thirty-islands-kiss.md b/.changeset/thirty-islands-kiss.md new file mode 100644 index 0000000000..c510d8c972 --- /dev/null +++ b/.changeset/thirty-islands-kiss.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Fix for typo in v3 CLI login command diff --git a/.changeset/tidy-tomatoes-explain.md 
b/.changeset/tidy-tomatoes-explain.md new file mode 100644 index 0000000000..9243df4776 --- /dev/null +++ b/.changeset/tidy-tomatoes-explain.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/core": patch +--- + +Fix 3rd party otel propagation from breaking our Task Events data from being properly correlated to the correct trace diff --git a/.env.example b/.env.example index a19ff46f29..bf5be8f95b 100644 --- a/.env.example +++ b/.env.example @@ -11,11 +11,16 @@ REMIX_APP_PORT=3030 APP_ENV=development APP_ORIGIN=http://localhost:3030 NODE_ENV=development +CLOUD_ENV=development +V3_ENABLED=true -# Redis is used for concurrency control -# REDIS_HOST="localhost" -# REDIS_PORT="6379" -# REDIS_TLS_DISABLED="true" +# Redis is used for the v3 queuing and v2 concurrency control +REDIS_HOST="localhost" +REDIS_PORT="6379" +REDIS_TLS_DISABLED="true" + +DEV_OTEL_EXPORTER_OTLP_ENDPOINT="http://localhost:3030/otel" +DEV_OTEL_BATCH_PROCESSING_ENABLED="0" # OPTIONAL VARIABLES # This is used for validating emails that are allowed to log in. Every email that do not match this regex will be rejected. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 231cbdf1f9..f4a8ad5f09 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,12 +10,12 @@ Thank you for helping us make Trigger.dev even better! 🀩 The development branch is `main`. This is the branch that all pull requests should be made against. The changes on the `main` -branch are tagged into a release monthly. +branch are tagged into a release periodically. ### Prerequisites -- [Node.js](https://nodejs.org/en) version >=16.x -- [pnpm package manager](https://pnpm.io/installation) version 7 +- [Node.js](https://nodejs.org/en) version 20.11.1 +- [pnpm package manager](https://pnpm.io/installation) version 8.15.5 - [Docker](https://www.docker.com/get-started/) ### Setup @@ -33,15 +33,19 @@ branch are tagged into a release monthly. ``` cd trigger.dev ``` -3. Install the required packages using pnpm. +3. 
Ensure you are on the correct version of Node.js (20.11.1). If you are using `nvm`, there is an `.nvmrc` file that will automatically select the correct version of Node.js when you navigate to the repository. + +4. Run `corepack enable` to use the correct version of pnpm (`8.15.5`) as specified in the root `package.json` file. + +5. Install the required packages using pnpm. ``` pnpm i ``` -4. Create your `.env` file +6. Create your `.env` file ``` cp .env.example .env ``` -5. Open it and generate a new value for `ENCRYPTION_KEY`: +7. Open it and generate a new value for `ENCRYPTION_KEY`: `ENCRYPTION_KEY` is used to two-way encrypt OAuth access tokens and so you'll probably want to actually generate a unique value, and it must be a random 16 byte hex string. You can generate one with the following command: @@ -51,7 +55,7 @@ branch are tagged into a release monthly. Feel free to update `SESSION_SECRET` and `MAGIC_LINK_SECRET` as well using the same method. -6. Start Docker. This starts the required services like Postgres. If this is your first time using Docker, consider going through this [guide](DOCKER_INSTALLATION.md) +8. Start Docker. This starts the required services like Postgres & Redis. If this is your first time using Docker, consider going through this [guide](DOCKER_INSTALLATION.md) ``` pnpm run docker @@ -59,19 +63,15 @@ branch are tagged into a release monthly. This will also start and run a local instance of [pgAdmin](https://www.pgadmin.org/) on [localhost:5480](http://localhost:5480), preconfigured with email `admin@example.com` and pwd `admin`. Then use `postgres` as the password to the Trigger.dev server. -7. Migrate the database +9. Migrate the database ``` pnpm run db:migrate ``` -8. Build the app - ``` - pnpm run build --filter webapp - ``` -9. Run the seed script - ``` - pnpm run db:seed - ``` -10. Run the app. See the section below. +10. Build the server app + ``` + pnpm run build --filter webapp + ``` +11. Run the app. See the section below. 
## Running @@ -83,99 +83,92 @@ branch are tagged into a release monthly. It should run on port `3030`: [http://localhost:3030](http://localhost:3030/) -2. Once the app is running click the magic link button and enter your email. -3. Check your terminal, the magic link email should have printed out as following: +2. Once the app is running click the magic link button and enter your email. You will automatically be logged in, since you are running locally. Create an Org and your first project in the dashboard. - ```sh - webapp:dev: Log in to Trigger.dev - webapp:dev: - webapp:dev: Click here to log in with this magic link - webapp:dev: [http://localhost:3030/magic?token=U2FsdGVkX18OvB0JxgaswTLCSbaRz%2FY82TN0EZWhSzFyZYwgG%2BIzKVTkeiaOtWfotPw7F8RwFzCHh53aBpMEu%2B%2B%2FItb%2FcJYh89MSjc3Pz92bevoEjqxSQ%2Ff%2BZbks09JOpqlBbYC3FzGWC8vuSVFBlxqLXxteSDLthZSUaC%2BS2LaA%2BJgp%2BLO7hgjAaC2lXbCHrM7MTgTdXOFt7i0Dvvuwz6%2BWY25RnfomZOPqDsyH0xz8Q2rzPTz0Xu53WSXrZ1hd] - webapp:dev: - webapp:dev: If you didn't try to log in, you can safely ignore this email. - ``` +## Manual testing using v3-catalog - Paste the magic link shown in your terminal into your browser to login. +We use the `/references/v3-catalog` subdirectory as a staging ground for testing changes to the SDK (`@trigger.dev/sdk` at `/packages/trigger-sdk`), the Core package (`@trigger.dev/core` at `packages/core`), the CLI (`trigger.dev` at `/packages/cli-v3`) and the platform (The remix app at `/apps/webapp`). The instructions below will get you started on using the `v3-catalog` for local development of Trigger.dev (v3). -## Adding and running migrations +### First-time setup -1. Modify packages/database/prisma/schema.prisma file -2. Change directory to the packages/database folder - ```sh - cd packages/database - ``` -3. Generate the Prisma client +First, make sure you are running the webapp according to the instructions above. Then: - ```sh - pnpm run generate - ``` +1. 
In Postgres go to the "Organizations" table and on your org set the `v3Enabled` column to `true`. + +2. Visit http://localhost:3030 in your browser and create a new V3 project called "v3-catalog". If you don't see an option for V3, you haven't set the `v3Enabled` flag to true. + +3. In Postgres go to the "Projects" table and for the project you created change the `externalRef` to `yubjwjsfkxnylobaqvqz`. + +4. Build the CLI + +```sh +pnpm run build --filter trigger.dev +``` + +5. Change into the `/references/v3-catalog` directory and authorize the CLI to the local server: + +```sh +cd references/v3-catalog +pnpm exec triggerdev login -a http://localhost:3030 +``` + +This will open a new browser window and authorize the CLI against your local user account. + +You can optionally pass a `--profile` flag to the `login` command, which will allow you to use the CLI with separate accounts/servers. 
We suggest using a profile called `local` for your local development: ```sh -cd packages/cli -pnpm run dev +cd references/v3-catalog +pnpm exec triggerdev login -a http://localhost:3030 --profile local +# later when you run the dev or deploy command: +pnpm exec triggerdev dev --profile local +pnpm exec triggerdev deploy --profile local ``` -2. Open a new Terminal window and run the webapp locally and then create a new project in the dashboard. Copy out the dev API key. +### Running -3. Create a new temporary Next.js app in references directory +The following steps should be followed any time you start working on a new feature you want to test in v3: -```sh -cd ./references -pnpm create next-app@latest test-cli --ts --no-eslint --tailwind --app --src-dir --import-alias "@/*" -``` +1. Make sure the webapp is running on localhost:3030 -4. Then once that's finished, add the `@trigger.dev/cli` to the `devDependencies` of the newly created Next.js app's `package.json` file, like so: +2. Open a terminal window and build the CLI and watch for changes -```json -{ - // other package.json properties - "devDependencies": { "@trigger.dev/cli": "workspace:*" } -} +```sh +pnpm run dev --filter trigger.dev ``` -5. Back in the terminal, navigate into the reference, and initialize the CLI. When prompted, select `self-hosted` and enter `localhost:3030` if you are testing against the local instance of Trigger.dev, or you can just use the Trigger.dev cloud. When asked for an API key, use the key you copied earlier. +2. Open a new terminal window, and anytime changes are made to the `@trigger.dev/core` package, you'll need to manually rebuild the CLI: ```sh -cd ./test-cli -pnpm i -pnpm exec trigger-cli init +pnpm run build --filter trigger.dev ``` -6. If you are just testing the `init` command, you can stop here. If you'd like to test the `dev` command, first start the Next.js app on port 3000: +Note: You do not need to do the same for `@trigger.dev/sdk`, just core. + +3. 
Open another terminal window, and change into the `/references/v3-catalog` directory. + +4. Run the `dev` command, which will register all the local tasks with the platform and allow you to start testing task execution: ```sh -pnpm run dev +# in /references/v3-catalog +pnpm exec triggerdev dev ``` -7. Open a new terminal window, and then run the `dev` command like so: +If you want additional debug logging, you can use the `--log-level debug` flag: ```sh -pnpm exec trigger-cli dev +# in /references/v3-catalog +pnpm exec triggerdev dev --log-level debug ``` -8. Please remember to delete the temporary project you created after you've tested the changes, and before you raise a PR. +5. If you make any changes in the CLI/Core/SDK, you'll need to `CTRL+C` to exit the `dev` command and restart it to pickup changes. Any changes to the files inside of the `v3-catalog/src/trigger` dir will automatically be rebuilt by the `dev` command. + +6. Navigate to the `v3-catalog` project in your local dashboard at localhost:3030 and you should see the list of tasks. + +7. Go to the "Test" page in the sidebar and select a task. Then enter a payload and click "Run test". You can tell what the payloads should be by looking at the relevant task file inside the `/references/v3-catalog/src/trigger` folder. Many of them accept an empty payload. -## Running end-to-end webapp tests +8. Feel free to add additional files in `v3-catalog/src/trigger` to test out specific aspects of the system, or add in edge cases. + +## Running end-to-end webapp tests (deprecated) To run the end-to-end tests, follow the steps below: @@ -223,6 +216,26 @@ The end-to-end tests use a `setup` and `teardown` script to seed the database wi pnpm run db:studio ``` +## Adding and running migrations + +1. Modify packages/database/prisma/schema.prisma file +2. Change directory to the packages/database folder + + ```sh + cd packages/database + ``` + +3. 
Create and apply the migrations + + ``` + pnpm run db:migrate:dev + ``` + + This creates a migration file and executes the migrations against your database and applies changes to the database schema(s) + +4. Commit generated migrations as well as changes to the schema.prisma file +5. If you're using VSCode you may need to restart the Typescript server in the webapp to get updated type inference. Open a TypeScript file, then open the Command Palette (View > Command Palette) and run `TypeScript: Restart TS server`. + ## Add sample jobs The [references/job-catalog](./references/job-catalog/) project defines simple jobs you can get started with. diff --git a/README.md b/README.md index be458ec2d9..84d6c541dc 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ Trigger.dev logo -### The open source background jobs framework +### The open source background jobs platform [Discord](https://trigger.dev/discord) | [Website](https://trigger.dev) | [Issues](https://github.com/triggerdotdev/trigger.dev/issues) | [Docs](https://trigger.dev/docs) @@ -14,7 +14,9 @@ -# About Trigger.dev +> The Trigger.dev v3 developer preview is now open. For more information and to get early access, check out our [developer preview launch post](https://trigger.dev/blog/v3-developer-preview-launch/). + +## About Trigger.dev Create long-running jobs directly in your codebase with features like API integrations, webhooks, scheduling and delays. @@ -48,30 +50,6 @@ View every Task in every Run so you can tell exactly what happened. Easily integrate with hundreds of third-party APIs – including your own. Use API keys (which never leave your server) or let us handle OAuth for you. Install our integration packages and easily subscribe to webhooks and perform common tasks, or you can easily use your existing favorite Node.JS SDKs and get resumability and idempotency through our `runTask` function. 
-## Our progress - -We’re building the most comprehensive and easy-to-use background jobs framework for developers. - -Click the links to join the discussions about our upcoming features. - -| Feature | What it does | Status | -| ------------------------------------------------------------------------------------ | --------------------------------------------------- | ------ | -| Integration kit | Official Trigger.dev integrations or build your own | βœ… | -| Self-hosting | Host the platform yourself | βœ… | -| Cloud | Just write code, no deployment required | βœ… | -| Dashboard | View every Task in every Run | βœ… | -| Serverless | Long-running Jobs on your serverless backend | βœ… | -| React hooks | Easily update your UI with Job progress | βœ… | -| React frameworks | Support for Remix, Astro, RedwoodJS & more | βœ… | -| [Background tasks](https://github.com/triggerdotdev/trigger.dev/discussions/400) | Offload long or intense Tasks to our infrastructure | πŸ› οΈ | -| [Long-running servers](https://github.com/triggerdotdev/trigger.dev/discussions/430) | Run Jobs on your long-running backend | πŸ› οΈ | -| Polling Triggers | Subscribe to changes without webhooks | πŸ• | -| Vercel integration | Easy deploy and preview environment support | πŸ• | -| Streaming | Receive data from your Jobs in realtime | πŸ• | -| 100+ integrations | Comprehensive support for popular APIs | πŸ• | -| [Trigger.dev Connect](https://github.com/triggerdotdev/trigger.dev/discussions/441) | Use integrations signed in as your users | πŸ• | -| File IO | Create Tasks that have file outputs | πŸ• | - # Getting started Visit our docs [here](https://trigger.dev/docs). 
diff --git a/apps/kubernetes-provider/src/index.ts b/apps/kubernetes-provider/src/index.ts index b6a8574a4c..8981c85d92 100644 --- a/apps/kubernetes-provider/src/index.ts +++ b/apps/kubernetes-provider/src/index.ts @@ -10,6 +10,7 @@ import { import { Machine, PostStartCauses, PreStopCauses, EnvironmentType } from "@trigger.dev/core/v3"; import { randomUUID } from "crypto"; import { TaskMonitor } from "./taskMonitor"; +import { PodCleaner } from "./podCleaner"; const RUNTIME_ENV = process.env.KUBERNETES_PORT ? "kubernetes" : "local"; const NODE_NAME = process.env.NODE_NAME || "local"; @@ -543,3 +544,11 @@ const taskMonitor = new TaskMonitor({ }); taskMonitor.start(); + +const podCleaner = new PodCleaner({ + runtimeEnv: RUNTIME_ENV, + namespace: "default", + intervalInSeconds: 300, +}); + +podCleaner.start(); diff --git a/apps/kubernetes-provider/src/podCleaner.ts b/apps/kubernetes-provider/src/podCleaner.ts new file mode 100644 index 0000000000..29955bab39 --- /dev/null +++ b/apps/kubernetes-provider/src/podCleaner.ts @@ -0,0 +1,264 @@ +import * as k8s from "@kubernetes/client-node"; +import { SimpleLogger } from "@trigger.dev/core-apps"; + +type PodCleanerOptions = { + runtimeEnv: "local" | "kubernetes"; + namespace?: string; + intervalInSeconds?: number; +}; + +export class PodCleaner { + private enabled = false; + private namespace = "default"; + private intervalInSeconds = 300; + + private logger = new SimpleLogger("[PodCleaner]"); + private k8sClient: { + core: k8s.CoreV1Api; + kubeConfig: k8s.KubeConfig; + }; + + constructor(private opts: PodCleanerOptions) { + if (opts.namespace) { + this.namespace = opts.namespace; + } + + if (opts.intervalInSeconds) { + this.intervalInSeconds = opts.intervalInSeconds; + } + + this.k8sClient = this.#createK8sClient(); + } + + #createK8sClient() { + const kubeConfig = new k8s.KubeConfig(); + + if (this.opts.runtimeEnv === "local") { + kubeConfig.loadFromDefault(); + } else if (this.opts.runtimeEnv === "kubernetes") { + 
kubeConfig.loadFromCluster(); + } else { + throw new Error(`Unsupported runtime environment: ${this.opts.runtimeEnv}`); + } + + return { + core: kubeConfig.makeApiClient(k8s.CoreV1Api), + kubeConfig: kubeConfig, + }; + } + + #isRecord(candidate: unknown): candidate is Record { + if (typeof candidate !== "object" || candidate === null) { + return false; + } else { + return true; + } + } + + #logK8sError(err: unknown, debugOnly = false) { + if (debugOnly) { + this.logger.debug("K8s API Error", err); + } else { + this.logger.error("K8s API Error", err); + } + } + + #handleK8sError(err: unknown) { + if (!this.#isRecord(err) || !this.#isRecord(err.body)) { + this.#logK8sError(err); + return; + } + + this.#logK8sError(err, true); + + if (typeof err.body.message === "string") { + this.#logK8sError({ message: err.body.message }); + return; + } + + this.#logK8sError({ body: err.body }); + } + + async #deletePods(opts: { + namespace: string; + dryRun?: boolean; + fieldSelector?: string; + labelSelector?: string; + }) { + return await this.k8sClient.core + .deleteCollectionNamespacedPod( + opts.namespace, + undefined, // pretty + undefined, // continue + opts.dryRun ? "All" : undefined, + opts.fieldSelector, + undefined, // gracePeriodSeconds + opts.labelSelector + ) + .catch(this.#handleK8sError.bind(this)); + } + + async #deleteCompletedRuns() { + this.logger.log("Deleting completed runs"); + + const start = Date.now(); + + const result = await this.#deletePods({ + namespace: this.namespace, + fieldSelector: "status.phase=Succeeded", + labelSelector: "app=task-run", + }); + + const elapsedMs = Date.now() - start; + + if (!result) { + this.logger.log("Deleting completed runs: No delete result", { elapsedMs }); + return; + } + + const total = (result.response as any)?.body?.items?.length ?? 
0; + + this.logger.log("Deleting completed runs: Done", { total, elapsedMs }); + } + + async #deleteFailedRuns() { + this.logger.log("Deleting failed runs"); + + const start = Date.now(); + + const result = await this.#deletePods({ + namespace: this.namespace, + fieldSelector: "status.phase=Failed", + labelSelector: "app=task-run", + }); + + const elapsedMs = Date.now() - start; + + if (!result) { + this.logger.log("Deleting failed runs: No delete result", { elapsedMs }); + return; + } + + const total = (result.response as any)?.body?.items?.length ?? 0; + + this.logger.log("Deleting failed runs: Done", { total, elapsedMs }); + } + + async #deleteUnrecoverableRuns() { + await this.#deletePods({ + namespace: this.namespace, + fieldSelector: "status.phase=?", + labelSelector: "app=task-run", + }); + } + + async start() { + this.enabled = true; + this.logger.log("Starting"); + + const completedInterval = setInterval(async () => { + if (!this.enabled) { + clearInterval(completedInterval); + return; + } + + try { + await this.#deleteCompletedRuns(); + } catch (error) { + this.logger.error("Error deleting completed runs", error); + } + }, this.intervalInSeconds * 1000); + + const failedInterval = setInterval( + async () => { + if (!this.enabled) { + clearInterval(failedInterval); + return; + } + + try { + await this.#deleteFailedRuns(); + } catch (error) { + this.logger.error("Error deleting completed runs", error); + } + }, + // Use a longer interval for failed runs. This is only a backup in case the task monitor fails. 
+ 2 * this.intervalInSeconds * 1000 + ); + + // this.#launchTests(); + } + + async stop() { + if (!this.enabled) { + return; + } + + this.enabled = false; + this.logger.log("Shutting down.."); + } + + async #launchTests() { + const createPod = async ( + container: k8s.V1Container, + name: string, + labels?: Record + ) => { + this.logger.log("Creating pod:", name); + + const pod = { + metadata: { + name, + labels, + }, + spec: { + restartPolicy: "Never", + automountServiceAccountToken: false, + terminationGracePeriodSeconds: 1, + containers: [container], + }, + } satisfies k8s.V1Pod; + + await this.k8sClient.core + .createNamespacedPod(this.namespace, pod) + .catch(this.#handleK8sError.bind(this)); + }; + + const createIdlePod = async (name: string, labels?: Record) => { + const container = { + name, + image: "docker.io/library/busybox", + command: ["sh"], + args: ["-c", "sleep infinity"], + } satisfies k8s.V1Container; + + await createPod(container, name, labels); + }; + + const createCompletedPod = async (name: string, labels?: Record) => { + const container = { + name, + image: "docker.io/library/busybox", + command: ["sh"], + args: ["-c", "true"], + } satisfies k8s.V1Container; + + await createPod(container, name, labels); + }; + + const createFailedPod = async (name: string, labels?: Record) => { + const container = { + name, + image: "docker.io/library/busybox", + command: ["sh"], + args: ["-c", "false"], + } satisfies k8s.V1Container; + + await createPod(container, name, labels); + }; + + await createIdlePod("test-idle-1", { app: "task-run" }); + await createFailedPod("test-failed-1", { app: "task-run" }); + await createCompletedPod("test-completed-1", { app: "task-run" }); + } +} diff --git a/apps/kubernetes-provider/src/taskMonitor.ts b/apps/kubernetes-provider/src/taskMonitor.ts index e07638ed26..c0296788da 100644 --- a/apps/kubernetes-provider/src/taskMonitor.ts +++ b/apps/kubernetes-provider/src/taskMonitor.ts @@ -30,10 +30,12 @@ type TaskMonitorOptions 
= { export class TaskMonitor { #enabled = false; + #logger = new SimpleLogger("[TaskMonitor]"); #taskInformer: ReturnType>; #processedPods = new Map(); #queue = new PQueue({ concurrency: 10 }); + #k8sClient: { core: k8s.CoreV1Api; kubeConfig: k8s.KubeConfig; @@ -44,6 +46,10 @@ export class TaskMonitor { private labelSelector = "app in (task-index, task-run)"; constructor(private opts: TaskMonitorOptions) { + if (opts.namespace) { + this.namespace = opts.namespace; + } + this.#k8sClient = this.#createK8sClient(); this.#taskInformer = this.#createTaskInformer(); diff --git a/apps/webapp/app/components/Feedback.tsx b/apps/webapp/app/components/Feedback.tsx index f7ad1b50f9..6caec83083 100644 --- a/apps/webapp/app/components/Feedback.tsx +++ b/apps/webapp/app/components/Feedback.tsx @@ -1,10 +1,14 @@ import { conform, useForm } from "@conform-to/react"; import { parse } from "@conform-to/zod"; +import { BookOpenIcon } from "@heroicons/react/20/solid"; import { ChevronRightIcon } from "@heroicons/react/24/solid"; import { Form, useActionData, useLocation, useNavigation } from "@remix-run/react"; import { DiscordIcon, GitHubLightIcon } from "@trigger.dev/companyicons"; +import { ActivityIcon } from "lucide-react"; import { ReactNode, useState } from "react"; import { FeedbackType, feedbackTypeLabel, schema } from "~/routes/resources.feedback"; +import { cn } from "~/utils/cn"; +import { docsPath } from "~/utils/pathBuilder"; import { Button, LinkButton } from "./primitives/Buttons"; import { Fieldset } from "./primitives/Fieldset"; import { FormButtons } from "./primitives/FormButtons"; @@ -13,20 +17,9 @@ import { Header1, Header2 } from "./primitives/Headers"; import { InputGroup } from "./primitives/InputGroup"; import { Label } from "./primitives/Label"; import { Paragraph } from "./primitives/Paragraph"; -import { - Select, - SelectContent, - SelectGroup, - SelectItem, - SelectTrigger, - SelectValue, -} from "./primitives/Select"; +import { Select, SelectItem } from 
"./primitives/Select"; import { Sheet, SheetBody, SheetContent, SheetTrigger } from "./primitives/Sheet"; import { TextArea } from "./primitives/TextArea"; -import { cn } from "~/utils/cn"; -import { BookOpenIcon } from "@heroicons/react/20/solid"; -import { ActivityIcon, HeartPulseIcon } from "lucide-react"; -import { docsPath } from "~/utils/pathBuilder"; type FeedbackProps = { button: ReactNode; @@ -78,20 +71,20 @@ export function Feedback({ button, defaultValue = "bug" }: FeedbackProps) {
- - - + {feedbackType.error} @@ -122,14 +115,7 @@ export function Feedback({ button, defaultValue = "bug" }: FeedbackProps) { Docs - v3 Docs (Developer preview) - - diff --git a/apps/webapp/app/components/admin/debugTooltip.tsx b/apps/webapp/app/components/admin/debugTooltip.tsx index a0b075cc19..5a7d805c94 100644 --- a/apps/webapp/app/components/admin/debugTooltip.tsx +++ b/apps/webapp/app/components/admin/debugTooltip.tsx @@ -20,9 +20,11 @@ export function AdminDebugTooltip({ children }: { children: React.ReactNode }) { - + - {children} + + {children} + ); diff --git a/apps/webapp/app/components/billing/PricingTiers.tsx b/apps/webapp/app/components/billing/PricingTiers.tsx index 30b9c2a006..dfe216f439 100644 --- a/apps/webapp/app/components/billing/PricingTiers.tsx +++ b/apps/webapp/app/components/billing/PricingTiers.tsx @@ -290,6 +290,7 @@ export function TierPro({ options={concurrencyTiers.map((c) => ({ label: `Up to ${c.upto}`, value: c.code }))} fullWidth value={concurrentBracketCode} + variant="primary" onChange={(v) => setConcurrentBracketCode(v)} />
diff --git a/apps/webapp/app/components/code/CodeBlock.tsx b/apps/webapp/app/components/code/CodeBlock.tsx index a1a496ad5c..ff0956a35b 100644 --- a/apps/webapp/app/components/code/CodeBlock.tsx +++ b/apps/webapp/app/components/code/CodeBlock.tsx @@ -1,6 +1,6 @@ import { Clipboard, ClipboardCheck } from "lucide-react"; import type { Language, PrismTheme } from "prism-react-renderer"; -import Highlight, { defaultProps } from "prism-react-renderer"; +import { Highlight } from "prism-react-renderer"; import { forwardRef, useCallback, useState } from "react"; import { cn } from "~/utils/cn"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "../primitives/Tooltip"; @@ -238,7 +238,7 @@ export const CodeBlock = forwardRef( )} {shouldHighlight ? ( - + {({ className: inheritedClassName, style: inheritedStyle, @@ -283,7 +283,7 @@ export const CodeBlock = forwardRef( return (
( const tokenProps = getTokenProps({ token, key }); return ( ; const variants = { @@ -7,6 +9,30 @@ const variants = { large: "h-6 text-xs px-1.5 rounded", }; +export function EnvironmentTypeLabel({ + environment, + size = "small", + className, +}: { + environment: Environment; + size?: keyof typeof variants; + className?: string; +}) { + return ( + + {environmentTypeTitle(environment)} + + ); +} + export function EnvironmentLabel({ environment, size = "small", @@ -33,6 +59,74 @@ export function EnvironmentLabel({ ); } +type EnvironmentWithUsername = Environment & { userName?: string }; + +export function EnvironmentLabels({ + environments, + size = "small", + className, +}: { + environments: EnvironmentWithUsername[]; + size?: keyof typeof variants; + className?: string; +}) { + const devEnvironments = sortEnvironments( + environments.filter((env) => env.type === "DEVELOPMENT") + ); + const firstDevEnvironment = devEnvironments[0]; + const otherDevEnvironments = devEnvironments.slice(1); + const otherEnvironments = environments.filter((env) => env.type !== "DEVELOPMENT"); + + return ( +
+ {firstDevEnvironment && ( + + )} + {otherDevEnvironments.length > 0 ? ( + + +{otherDevEnvironments.length} + + } + content={ +
+ {otherDevEnvironments.map((environment, index) => ( + + ))} +
+ } + /> + ) : null} + {otherEnvironments.map((environment, index) => ( + + ))} +
+ ); +} + export function environmentTitle(environment: Environment, username?: string) { switch (environment.type) { case "PRODUCTION": @@ -46,6 +140,19 @@ export function environmentTitle(environment: Environment, username?: string) { } } +export function environmentTypeTitle(environment: Environment) { + switch (environment.type) { + case "PRODUCTION": + return "Prod"; + case "STAGING": + return "Staging"; + case "DEVELOPMENT": + return "Dev"; + case "PREVIEW": + return "Preview"; + } +} + export function environmentColorClassName(environment: Environment) { switch (environment.type) { case "PRODUCTION": diff --git a/apps/webapp/app/components/events/EventsFilters.tsx b/apps/webapp/app/components/events/EventsFilters.tsx index 13711d804f..c210119349 100644 --- a/apps/webapp/app/components/events/EventsFilters.tsx +++ b/apps/webapp/app/components/events/EventsFilters.tsx @@ -9,7 +9,7 @@ import { SelectItem, SelectTrigger, SelectValue, -} from "../primitives/Select"; +} from "../primitives/SimpleSelect"; import { EventListSearchSchema } from "./EventStatuses"; import { environmentKeys, FilterableEnvironment } from "~/components/runs/RunStatuses"; import { TimeFrameFilter } from "../runs/TimeFrameFilter"; diff --git a/apps/webapp/app/components/integrations/ConnectToOAuthForm.tsx b/apps/webapp/app/components/integrations/ConnectToOAuthForm.tsx index a6757a6dbd..3f4e6159a8 100644 --- a/apps/webapp/app/components/integrations/ConnectToOAuthForm.tsx +++ b/apps/webapp/app/components/integrations/ConnectToOAuthForm.tsx @@ -12,7 +12,7 @@ import { ApiAuthenticationMethodOAuth2, Integration, Scope } from "~/services/ex import { cn } from "~/utils/cn"; import { CodeBlock } from "../code/CodeBlock"; import { Button } from "../primitives/Buttons"; -import { Checkbox } from "../primitives/Checkbox"; +import { CheckboxWithLabel } from "../primitives/Checkbox"; import { Fieldset } from "../primitives/Fieldset"; import { FormError } from "../primitives/FormError"; import { 
Header2, Header3 } from "../primitives/Headers"; @@ -123,7 +123,7 @@ export function ConnectToOAuthForm({ To use your own OAuth app, check the option below and insert the details. - { return ( - To use your own OAuth app, check the option below and insert the details. - { return ( -
- + - In your Slack app, create a new channel from the main menu by going to File{" "} - New Channel - - - - - - Name your channel, set its visibility and click 'Create'. - - - - - - Invite this email address to your channel:{" "} + Send us an email to this address from your Trigger.dev account email + address: + + + - As soon as we can, we'll accept your invitation and say hello! + As soon as we can, we'll setup a Slack Connect channel and say hello!
@@ -549,6 +542,8 @@ function V3ProjectSideMenu({ project: SideMenuProject; organization: MatchedOrganization; }) { + const { alertsEnabled } = useFeatures(); + return ( <> @@ -601,6 +596,15 @@ function V3ProjectSideMenu({ to={v3DeploymentsPath(organization, project)} data-action="deployments" /> + {alertsEnabled && ( + + )} void; + variant?: Variant; + className?: string; +}; + +export function AppliedFilter({ + label, + value, + removable = true, + onRemove, + variant = "tertiary/small", + className, +}: AppliedFilterProps) { + const variantClassName = variants[variant]; + return ( +
+
+
+ {label}: +
+
+
{value}
+
+
+ {removable && ( + + )} +
+ ); +} diff --git a/apps/webapp/app/components/primitives/Buttons.tsx b/apps/webapp/app/components/primitives/Buttons.tsx index 88731a9b7c..e292991941 100644 --- a/apps/webapp/app/components/primitives/Buttons.tsx +++ b/apps/webapp/app/components/primitives/Buttons.tsx @@ -48,11 +48,11 @@ const theme = { "border-black/40 text-charcoal-900 group-hover:border-black/60 group-hover:text-charcoal-900", }, secondary: { - textColor: "text-primary group-hover:text-apple-200 transition group-disabled:text-primary", + textColor: "text-secondary group-hover:text-secondary transition group-disabled:text-secondary", button: - "bg-transparent border border-primary group-hover:border-apple-200 group-hover:bg-apple-950 group-disabled:opacity-30 group-disabled:border-primary group-disabled:bg-transparent group-disabled:pointer-events-none", + "bg-transparent border border-secondary group-hover:border-secondary group-hover:bg-secondary/10 group-disabled:opacity-30 group-disabled:border-secondary group-disabled:bg-transparent group-disabled:pointer-events-none", shortcut: - "border-primary/30 text-apple-200 group-hover:text-text-bright/80 group-hover:border-dimmed/60", + "border-secondary/30 text-secondary group-hover:text-text-bright/80 group-hover:border-dimmed/60", }, tertiary: { textColor: "text-text-bright transition group-disabled:text-text-dimmed/80", diff --git a/apps/webapp/app/components/primitives/Checkbox.tsx b/apps/webapp/app/components/primitives/Checkbox.tsx index eea6b62a54..20003c5e68 100644 --- a/apps/webapp/app/components/primitives/Checkbox.tsx +++ b/apps/webapp/app/components/primitives/Checkbox.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { useEffect, useState } from "react"; +import { forwardRef, useEffect, useState } from "react"; import { cn } from "~/utils/cn"; import { Badge } from "./Badge"; import { Paragraph } from "./Paragraph"; @@ -53,18 +53,18 @@ export type CheckboxProps = Omit< React.InputHTMLAttributes, "checked" | "onChange" > & { 
- id: string; + id?: string; name?: string; value?: string; variant?: keyof typeof variants; - label?: React.ReactNode; + label: React.ReactNode; description?: string; badges?: string[]; className?: string; onChange?: (isChecked: boolean) => void; }; -export const Checkbox = React.forwardRef( +export const CheckboxWithLabel = React.forwardRef( ( { id, @@ -172,3 +172,21 @@ export const Checkbox = React.forwardRef( ); } ); + +type SimpleCheckboxProps = Omit, "type">; + +export const Checkbox = forwardRef( + ({ className, ...props }: SimpleCheckboxProps, ref) => { + return ( + + ); + } +); diff --git a/apps/webapp/app/components/primitives/DetailCell.tsx b/apps/webapp/app/components/primitives/DetailCell.tsx index 0be8597c88..4c1c7ec264 100644 --- a/apps/webapp/app/components/primitives/DetailCell.tsx +++ b/apps/webapp/app/components/primitives/DetailCell.tsx @@ -49,12 +49,7 @@ export function DetailCell({ const variation = variations[variant]; return ( -
+
{label} {description && ( {description} diff --git a/apps/webapp/app/components/primitives/Dialog.tsx b/apps/webapp/app/components/primitives/Dialog.tsx index 5f04b94b61..a6c0bf8134 100644 --- a/apps/webapp/app/components/primitives/Dialog.tsx +++ b/apps/webapp/app/components/primitives/Dialog.tsx @@ -43,7 +43,7 @@ const DialogContent = React.forwardRef< ) => (
); diff --git a/apps/webapp/app/components/primitives/SegmentedControl.tsx b/apps/webapp/app/components/primitives/SegmentedControl.tsx index 5576ad2cf4..024f4ee54c 100644 --- a/apps/webapp/app/components/primitives/SegmentedControl.tsx +++ b/apps/webapp/app/components/primitives/SegmentedControl.tsx @@ -2,6 +2,19 @@ import { RadioGroup } from "@headlessui/react"; import { motion } from "framer-motion"; import { cn } from "~/utils/cn"; +const variants = { + primary: { + base: "bg-charcoal-700", + active: "text-text-bright hover:bg-charcoal-750/50", + }, + secondary: { + base: "bg-charcoal-700/50", + active: "text-text-bright bg-charcoal-700 rounded-[2px] border border-charcoal-600/50", + }, +}; + +type Variants = keyof typeof variants; + type Options = { label: string; value: string; @@ -12,6 +25,7 @@ type SegmentedControlProps = { value?: string; defaultValue?: string; options: Options[]; + variant?: Variants; fullWidth?: boolean; onChange?: (value: string) => void; }; @@ -21,11 +35,18 @@ export default function SegmentedControl({ value, defaultValue, options, + variant = "secondary", fullWidth, onChange, }: SegmentedControlProps) { return ( -
+
@@ -60,12 +81,12 @@ export default function SegmentedControl({
{option.label}
- {checked && ( + {checked && variant === "primary" && ( + /> )}
diff --git a/apps/webapp/app/components/primitives/Select.tsx b/apps/webapp/app/components/primitives/Select.tsx index 61ea34133f..6e9d73aee7 100644 --- a/apps/webapp/app/components/primitives/Select.tsx +++ b/apps/webapp/app/components/primitives/Select.tsx @@ -1,143 +1,624 @@ -"use client"; - -import * as SelectPrimitive from "@radix-ui/react-select"; -import { Check, ChevronDown } from "lucide-react"; +import * as Ariakit from "@ariakit/react"; +import { SelectProps as AriaSelectProps } from "@ariakit/react"; +import { SelectValue } from "@ariakit/react-core/select/select-value"; +import { Link } from "@remix-run/react"; import * as React from "react"; +import { Fragment, useMemo, useState } from "react"; +import { ShortcutDefinition, useShortcutKeys } from "~/hooks/useShortcutKeys"; import { cn } from "~/utils/cn"; +import { ShortcutKey } from "./ShortcutKey"; +import { ChevronDown } from "lucide-react"; const sizes = { - "secondary/small": - "text-xs h-6 bg-tertiary border border-tertiary group-hover:text-text-bright hover:border-charcoal-600 pr-2 pl-1.5", - medium: "text-sm h-8 bg-tertiary border border-tertiary hover:border-charcoal-600 px-2.5", - minimal: "text-xs h-6 bg-transparent hover:bg-tertiary pl-1.5 pr-2", + small: { + button: "h-6 rounded text-xs px-2 ", + }, + medium: { + button: "h-8 rounded text-xs px-3 text-sm", + }, +}; + +const style = { + tertiary: { + button: + "bg-tertiary focus-within:ring-charcoal-500 border border-tertiary hover:text-text-bright hover:border-charcoal-600", + }, + minimal: { + button: + "bg-transparent focus-within:ring-charcoal-500 hover:bg-tertiary disabled:bg-transparent disabled:pointer-events-none", + }, }; -export type SelectProps = { - size?: keyof typeof sizes; - width?: "content" | "full"; +const variants = { + "tertiary/small": { + button: cn(sizes.small.button, style.tertiary.button), + }, + "tertiary/medium": { + button: cn(sizes.medium.button, style.tertiary.button), + }, + "minimal/small": { + button: 
cn(sizes.small.button, style.minimal.button), + }, + "minimal/medium": { + button: cn(sizes.medium.button, style.minimal.button), + }, }; -const Select = SelectPrimitive.Root; -const SelectGroup = SelectPrimitive.Group; -const SelectValue = SelectPrimitive.Value; -const SelectTrigger = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef & SelectProps ->(({ className, children, width = "content", size = "secondary/small", ...props }, ref) => { - const sizeClassName = sizes[size]; +type Variant = keyof typeof variants; + +type Section = { + type: "section"; + title?: string; + items: TItem[]; +}; + +function isSection(data: TItem[] | Section[]): data is Section[] { + const firstItem = data[0]; return ( - ).type === "section" && + (firstItem as Section).items !== undefined && + Array.isArray((firstItem as Section).items) + ); +} + +type ItemFromSection = TItemOrSection extends Section ? U : TItemOrSection; +export interface SelectProps + extends Omit { + icon?: React.ReactNode; + text?: React.ReactNode | ((value: TValue) => React.ReactNode); + placeholder?: React.ReactNode; + value?: Ariakit.SelectProviderProps["value"]; + setValue?: Ariakit.SelectProviderProps["setValue"]; + defaultValue?: Ariakit.SelectProviderProps["defaultValue"]; + label?: string | Ariakit.SelectLabelProps["render"]; + heading?: string; + showHeading?: boolean; + items?: TItem[] | Section[]; + empty?: React.ReactNode; + filter?: (item: ItemFromSection, search: string, title?: string) => boolean; + children: + | React.ReactNode + | (( + items: ItemFromSection[], + meta: { + shortcutsEnabled?: boolean; + section?: { + title?: string; + startIndex: number; + count: number; + }; + } + ) => React.ReactNode); + variant?: Variant; + open?: boolean; + setOpen?: (open: boolean) => void; + shortcut?: ShortcutDefinition; + allowItemShortcuts?: boolean; + clearSearchOnSelection?: boolean; + dropdownIcon?: boolean | React.ReactNode; +} + +export function Select({ + children, + icon, + text, 
+ placeholder, + value, + setValue, + defaultValue, + label, + heading, + showHeading = false, + items, + filter, + empty = null, + variant = "tertiary/small", + open, + setOpen, + shortcut, + allowItemShortcuts = true, + disabled, + clearSearchOnSelection = true, + dropdownIcon, + ...props +}: SelectProps) { + const [searchValue, setSearchValue] = useState(""); + const searchable = items !== undefined && filter !== undefined; + + const matches = useMemo(() => { + if (!items) return []; + if (!searchValue || !filter) return items; + + if (isSection(items)) { + return items + .map((section) => ({ + ...section, + items: section.items.filter((item) => + filter(item as ItemFromSection, searchValue, section.title) + ), + })) + .filter((section) => section.items.length > 0); + } + + return items.filter((item) => filter(item as ItemFromSection, searchValue)); + }, [searchValue, items]); + + const enableItemShortcuts = allowItemShortcuts && matches.length === items?.length; + + const select = ( + { + if (clearSearchOnSelection) { + setSearchValue(""); + } + + if (setValue) { + setValue(v as any); + } + }} + defaultValue={defaultValue} > - {children} - - {label}
: label} />} + + + {!searchable && showHeading && heading && {heading}} />} + {searchable && } + + + {typeof children === "function" ? ( + matches.length > 0 ? ( + isSection(matches) ? ( + + ) : ( + children(matches as ItemFromSection[], { + shortcutsEnabled: enableItemShortcuts, + }) + ) + ) : ( + empty + ) + ) : ( + children )} - /> - - + + + ); -}); -SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; - -const SelectContent = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, position = "popper", ...props }, ref) => ( - - { + React.startTransition(() => { + setSearchValue(value); + }); + }} + > + {select} + + ); + } + + return select; +} + +export interface SelectTriggerProps extends AriaSelectProps { + icon?: React.ReactNode; + text?: React.ReactNode | ((value: TValue) => React.ReactNode); + placeholder?: React.ReactNode; + variant?: Variant; + shortcut?: ShortcutDefinition; + tooltipTitle?: string; + dropdownIcon?: boolean | React.ReactNode; +} + +export function SelectTrigger({ + icon, + variant = "tertiary/small", + text, + shortcut, + tooltipTitle, + disabled, + placeholder, + dropdownIcon = false, + children, + className, + ...props +}: SelectTriggerProps) { + const ref = React.useRef(null); + useShortcutKeys({ + shortcut: shortcut, + action: (e) => { + e.preventDefault(); + e.stopPropagation(); + if (ref.current) { + ref.current.click(); + } + }, + disabled, + }); + + const showTooltip = tooltipTitle || shortcut; + const variantClasses = variants[variant]; + + let content: React.ReactNode = ""; + if (children) { + content = children; + } else if (text !== undefined) { + if (typeof text === "function") { + content = {(value) => <>{text(value) ?? placeholder}}; + } else { + content = text; + } + } else { + content = ( + + {(value) => ( + <> + {typeof value === "string" + ? value ?? placeholder + : value.length === 0 + ? placeholder + : value.join(", ")} + + )} + + ); + } + + return ( + + + } + > +
+ {icon &&
{icon}
} +
{content}
+
+ {dropdownIcon === true ? ( + + ) : !dropdownIcon ? null : ( + dropdownIcon + )} +
+ {showTooltip && ( + +
+ {tooltipTitle ?? "Open menu"} + {shortcut && ( + + )} +
+
+ )} +
+ ); +} + +export interface SelectProviderProps + extends Ariakit.SelectProviderProps {} +export function SelectProvider( + props: SelectProviderProps +) { + return ; +} + +export interface ComboboxProviderProps extends Ariakit.ComboboxProviderProps {} +export function ComboboxProvider(props: ComboboxProviderProps) { + return ; +} + +function SelectGroupedRenderer({ + items, + children, + enableItemShortcuts, +}: { + items: Section[]; + children: ( + items: ItemFromSection[], + meta: { + shortcutsEnabled?: boolean; + section?: { title?: string; startIndex: number; count: number }; + } + ) => React.ReactNode; + enableItemShortcuts: boolean; +}) { + let count = 0; + return ( + <> + {items.map((section, index) => { + const previousItem = items.at(index - 1); + count += previousItem ? previousItem.items.length : 0; + return ( + + {children(section.items as ItemFromSection[], { + shortcutsEnabled: enableItemShortcuts, + section: { + title: section.title, + startIndex: count - 1, + count: section.items.length, + }, + })} + + ); + })} + + ); +} + +export interface SelectListProps extends Omit {} +export function SelectList(props: SelectListProps) { + const combobox = Ariakit.useComboboxContext(); + const Component = combobox ? Ariakit.ComboboxList : Ariakit.SelectList; + + return ( + + ); +} + +export interface SelectItemProps extends Ariakit.SelectItemProps { + icon?: React.ReactNode; + checkIcon?: React.ReactNode; + shortcut?: ShortcutDefinition; +} + +const selectItemClasses = + "group cursor-pointer px-1 pt-1 text-xs text-text-dimmed outline-none last:pb-1"; + +export function SelectItem({ + icon, + checkIcon = , + shortcut, + ...props +}: SelectItemProps) { + const combobox = Ariakit.useComboboxContext(); + const render = combobox ? 
: undefined; + const ref = React.useRef(null); + + useShortcutKeys({ + shortcut: shortcut, + action: (e) => { + e.preventDefault(); + e.stopPropagation(); + if (ref.current) { + ref.current.click(); + } + }, + disabled: props.disabled, + enabledOnInputElements: true, + }); + + return ( + - + {icon} +
{props.children || props.value}
+ {checkIcon} + {shortcut && ( + )} - > - {children} -
-
-
-)); -SelectContent.displayName = SelectPrimitive.Content.displayName; - -const SelectLabel = React.forwardRef< - React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, ...props }, ref) => ( - -)); -SelectLabel.displayName = SelectPrimitive.Label.displayName; - -type SelectItemProps = React.ComponentPropsWithoutRef & { - contentClassName?: string; -}; +
+ + ); +} -const SelectItem = React.forwardRef, SelectItemProps>( - ({ className, children, contentClassName, ...props }, ref) => ( - , + to, + ...props +}: SelectLinkItemProps) { + const render = ; + + return ( + + ); +} + +export interface SelectButtonItemProps extends Omit { + icon?: React.ReactNode; + checkIcon?: React.ReactNode; + shortcut?: ShortcutDefinition; + onClick: React.ComponentProps<"button">["onClick"]; +} + +export function SelectButtonItem({ + checkIcon = , + onClick, + ...props +}: SelectButtonItemProps) { + const render = ( + + + )} +
); +} - const handleFilterChange = useCallback((filterType: string, value: string | undefined) => { - if (value) { - searchParams.set(filterType, value); - } else { - searchParams.delete(filterType); - } - searchParams.delete("cursor"); - searchParams.delete("direction"); - navigate(`${location.pathname}?${searchParams.toString()}`); - }, []); - - const handleStatusChange = useCallback((value: TaskRunAttemptStatus | typeof All) => { - handleFilterChange("statuses", value === "ALL" ? undefined : value); - }, []); - - const handleTaskChange = useCallback((value: string | typeof All) => { - handleFilterChange("tasks", value === "ALL" ? undefined : value); - }, []); - - const handleEnvironmentChange = useCallback((value: string | typeof All) => { - handleFilterChange("environments", value === "ALL" ? undefined : value); - }, []); - - const handleTimeFrameChange = useCallback((range: { from?: number; to?: number }) => { - if (range.from) { - searchParams.set("from", range.from.toString()); - } else { - searchParams.delete("from"); - } +const filterTypes = [ + { + name: "statuses", + title: "Status", + icon: ( +
+
+
+ ), + }, + { name: "environments", title: "Environment", icon: }, + { name: "tasks", title: "Tasks", icon: }, + { name: "created", title: "Created", icon: }, + { name: "bulk", title: "Bulk action", icon: }, +] as const; - if (range.to) { - searchParams.set("to", range.to.toString()); - } else { - searchParams.delete("to"); - } +type FilterType = (typeof filterTypes)[number]["name"]; - searchParams.delete("cursor"); - searchParams.delete("direction"); - navigate(`${location.pathname}?${searchParams.toString()}`); - }, []); +const shortcut = { key: "f" }; - const clearFilters = useCallback(() => { - searchParams.delete("statuses"); - searchParams.delete("environments"); - searchParams.delete("tasks"); - searchParams.delete("from"); - searchParams.delete("to"); - navigate(`${location.pathname}?${searchParams.toString()}`); - }, []); +function FilterMenu(props: RunFiltersProps) { + const [filterType, setFilterType] = useState(); + + const filterTrigger = ( + + +
+ } + variant={"minimal/small"} + shortcut={shortcut} + tooltipTitle={"Filter runs"} + > + Filter + + ); return ( -
- - - - - - - - - - - - - - -
+ ))} + + + + ); +} + +function AppliedStatusFilter() { + const { values, del } = useSearchParams(); + const statuses = values("statuses"); + + if (statuses.length === 0) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + runStatusTitle(v as TaskRunStatus)))} + onRemove={() => del(["statuses", "cursor", "direction"])} + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + /> + )} + ); } + +function EnvironmentsDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, + possibleEnvironments, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; + possibleEnvironments: DisplayableEnvironment[]; +}) { + const { values, replace } = useSearchParams(); + + const handleChange = (values: string[]) => { + clearSearchValue(); + replace({ environments: values, cursor: undefined, direction: undefined }); + }; + + const filtered = useMemo(() => { + return possibleEnvironments.filter((item) => { + const title = environmentTitle(item, item.userName); + return title.toLowerCase().includes(searchValue.toLowerCase()); + }); + }, [searchValue, possibleEnvironments]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + + + {filtered.map((item, index) => ( + + + + ))} + + + + ); +} + +function AppliedEnvironmentFilter({ + possibleEnvironments, +}: Pick) { + const { values, del } = useSearchParams(); + + if (values("environments").length === 0) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + { + const environment = possibleEnvironments.find((env) => env.id === v); + return environment ? 
environmentTitle(environment, environment.userName) : v; + }) + )} + onRemove={() => del(["environments", "cursor", "direction"])} + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + possibleEnvironments={possibleEnvironments} + /> + )} + + ); +} + +function TasksDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, + possibleTasks, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; + possibleTasks: { slug: string; triggerSource: TaskTriggerSource }[]; +}) { + const { values, replace } = useSearchParams(); + + const handleChange = (values: string[]) => { + clearSearchValue(); + replace({ tasks: values, cursor: undefined, direction: undefined }); + }; + + const filtered = useMemo(() => { + return possibleTasks.filter((item) => { + return item.slug.toLowerCase().includes(searchValue.toLowerCase()); + }); + }, [searchValue, possibleTasks]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + + + {filtered.map((item, index) => ( + } + > + {item.slug} + + ))} + + + + ); +} + +function AppliedTaskFilter({ possibleTasks }: Pick) { + const { values, del } = useSearchParams(); + + if (values("tasks").length === 0) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + { + const task = possibleTasks.find((task) => task.slug === v); + return task ? 
task.slug : v; + }) + )} + onRemove={() => del(["tasks", "cursor", "direction"])} + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + possibleTasks={possibleTasks} + /> + )} + + ); +} + +function BulkActionsDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, + bulkActions, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; + bulkActions: RunFiltersProps["bulkActions"]; +}) { + const { value, replace } = useSearchParams(); + + const handleChange = (value: string) => { + clearSearchValue(); + replace({ bulkId: value, cursor: undefined, direction: undefined }); + }; + + const filtered = useMemo(() => { + return bulkActions.filter((item) => { + return ( + item.type.toLowerCase().includes(searchValue.toLowerCase()) || + item.createdAt.toISOString().includes(searchValue) + ); + }); + }, [searchValue, bulkActions]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + + + None + {filtered.map((item, index) => ( + +
+ + +
+
+ ))} +
+
+
+ ); +} + +function AppliedBulkActionsFilter({ bulkActions }: Pick) { + const { value, del } = useSearchParams(); + + const bulkId = value("bulkId"); + + if (!bulkId) { + return null; + } + + const action = bulkActions.find((action) => action.id === bulkId); + + return ( + + {(search, setSearch) => ( + }> + del(["bulkId", "cursor", "direction"])} + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + bulkActions={bulkActions} + /> + )} + + ); +} + +const timePeriods = [ + { + label: "All periods", + value: "all", + }, + { + label: "5 mins ago", + value: "5m", + }, + { + label: "15 mins ago", + value: "15m", + }, + { + label: "30 mins ago", + value: "30m", + }, + { + label: "1 hour ago", + value: "1h", + }, + { + label: "3 hours ago", + value: "3h", + }, + { + label: "6 hours ago", + value: "6h", + }, + { + label: "1 day ago", + value: "1d", + }, + { + label: "3 days ago", + value: "3d", + }, + { + label: "7 days ago", + value: "7d", + }, + { + label: "10 days ago", + value: "10d", + }, + { + label: "14 days ago", + value: "14d", + }, + { + label: "30 days ago", + value: "30d", + }, +]; + +function CreatedDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; +}) { + const { value, replace } = useSearchParams(); + + const handleChange = (newValue: string) => { + clearSearchValue(); + if (newValue === "all") { + if (!value) return; + } + + replace({ period: newValue, cursor: undefined, direction: undefined }); + }; + + const filtered = useMemo(() => { + return timePeriods.filter((item) => + item.label.toLowerCase().includes(searchValue.toLowerCase()) + ); + }, [searchValue]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + + + {filtered.map((item) => ( + + {item.label} + + ))} + + + + ); +} + +function AppliedPeriodFilter() { + const { value, del } = 
useSearchParams(); + + if (value("period") === undefined || value("period") === "all") { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + t.value === value("period"))?.label ?? value("period") + } + onRemove={() => del(["period", "cursor", "direction"])} + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + /> + )} + + ); +} + +function appliedSummary(values: string[], maxValues = 3) { + if (values.length === 0) { + return null; + } + + if (values.length > maxValues) { + return `${values.slice(0, maxValues).join(", ")} + ${values.length - maxValues} more`; + } + + return values.join(", "); +} diff --git a/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx b/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx index 6a042b7bc1..6bd5618e55 100644 --- a/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx +++ b/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx @@ -1,9 +1,11 @@ -import { TrashIcon, XMarkIcon } from "@heroicons/react/20/solid"; +import { XMarkIcon } from "@heroicons/react/20/solid"; import { useNavigate } from "@remix-run/react"; import { RuntimeEnvironment } from "@trigger.dev/database"; import { useCallback } from "react"; import { z } from "zod"; +import { Input } from "~/components/primitives/Input"; import { useOptimisticLocation } from "~/hooks/useOptimisticLocation"; +import { useThrottle } from "~/hooks/useThrottle"; import { EnvironmentLabel } from "../../environments/EnvironmentLabel"; import { Button } from "../../primitives/Buttons"; import { Paragraph } from "../../primitives/Paragraph"; @@ -14,10 +16,7 @@ import { SelectItem, SelectTrigger, SelectValue, -} from "../../primitives/Select"; -import { Input } from "~/components/primitives/Input"; -import { useDebounce } from "~/hooks/useDebounce"; -import { useThrottle } from "~/hooks/useThrottle"; +} from "../../primitives/SimpleSelect"; export const ScheduleListFilters = z.object({ page: z.coerce.number().default(1), @@ -53,6 +52,9 @@ 
export function ScheduleFilters({ possibleEnvironments, possibleTasks }: Schedul Object.fromEntries(searchParams.entries()) ); + const hasFilters = + searchParams.has("tasks") || searchParams.has("environments") || searchParams.has("search"); + const handleFilterChange = useCallback((filterType: string, value: string | undefined) => { if (value) { searchParams.set(filterType, value); @@ -146,7 +148,7 @@ export function ScheduleFilters({ possibleEnvironments, possibleTasks }: Schedul {task} @@ -156,7 +158,11 @@ export function ScheduleFilters({ possibleEnvironments, possibleTasks }: Schedul - + )}
); } diff --git a/apps/webapp/app/components/runs/v3/TaskRunAttemptStatus.tsx b/apps/webapp/app/components/runs/v3/TaskRunAttemptStatus.tsx index 557b62e085..de0fa038b1 100644 --- a/apps/webapp/app/components/runs/v3/TaskRunAttemptStatus.tsx +++ b/apps/webapp/app/components/runs/v3/TaskRunAttemptStatus.tsx @@ -7,7 +7,7 @@ import { XCircleIcon, } from "@heroicons/react/20/solid"; import type { TaskRunAttemptStatus as TaskRunAttemptStatusType } from "@trigger.dev/database"; -import { TaskRunAttemptStatus } from "@trigger.dev/database"; +import { TaskRunAttemptStatus } from "~/database-types"; import assertNever from "assert-never"; import { SnowflakeIcon } from "lucide-react"; import { Spinner } from "~/components/primitives/Spinner"; @@ -17,7 +17,7 @@ export const allTaskRunAttemptStatuses = Object.values( TaskRunAttemptStatus ) as TaskRunAttemptStatusType[]; -export type ExtendedTaskAttemptStatus = (typeof allTaskRunAttemptStatuses)[number] | "ENQUEUED"; +export type ExtendedTaskAttemptStatus = TaskRunAttemptStatusType | "ENQUEUED"; export function TaskRunAttemptStatusCombo({ status, diff --git a/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx b/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx index 17b4a317b6..1a49eb8ed0 100644 --- a/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx +++ b/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx @@ -15,6 +15,21 @@ import { SnowflakeIcon } from "lucide-react"; import { Spinner } from "~/components/primitives/Spinner"; import { cn } from "~/utils/cn"; +export const allTaskRunStatuses = [ + "WAITING_FOR_DEPLOY", + "PENDING", + "EXECUTING", + "RETRYING_AFTER_FAILURE", + "WAITING_TO_RESUME", + "COMPLETED_SUCCESSFULLY", + "CANCELED", + "COMPLETED_WITH_ERRORS", + "CRASHED", + "PAUSED", + "INTERRUPTED", + "SYSTEM_FAILURE", +] as const satisfies Readonly>; + const taskRunStatusDescriptions: Record = { PENDING: "Task is waiting to be executed", WAITING_FOR_DEPLOY: "Task needs to be deployed first to start executing", 
diff --git a/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx b/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx index 3e30b63253..7d0e526594 100644 --- a/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx +++ b/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx @@ -28,6 +28,11 @@ import { CancelRunDialog } from "./CancelRunDialog"; import { ReplayRunDialog } from "./ReplayRunDialog"; import { TaskRunStatusCombo } from "./TaskRunStatus"; import { LiveTimer } from "./LiveTimer"; +import { useSelectedItems } from "~/components/primitives/SelectedItemsProvider"; +import { Checkbox } from "~/components/primitives/Checkbox"; +import { useCallback, useRef } from "react"; +import { run } from "@remix-run/dev/dist/cli/run"; +import { formatNumber } from "~/utils/numberFormatter"; type RunsTableProps = { total: number; @@ -36,6 +41,7 @@ type RunsTableProps = { showJob?: boolean; runs: RunListItem[]; isLoading?: boolean; + allowSelection?: boolean; }; export function TaskRunsTable({ @@ -44,15 +50,66 @@ export function TaskRunsTable({ filters, runs, isLoading = false, + allowSelection = false, }: RunsTableProps) { const organization = useOrganization(); const project = useProject(); + const checkboxes = useRef<(HTMLInputElement | null)[]>([]); + const { selectedItems, has, hasAll, select, deselect, toggle } = useSelectedItems(allowSelection); + + const navigateCheckboxes = useCallback( + (event: React.KeyboardEvent, index: number) => { + //indexes are out by one because of the header row + if (event.key === "ArrowUp" && index > 0) { + checkboxes.current[index - 1]?.focus(); + + if (event.shiftKey) { + const oldItem = runs.at(index - 1); + const newItem = runs.at(index - 2); + const itemsIds = [oldItem?.id, newItem?.id].filter(Boolean); + select(itemsIds); + } + } else if (event.key === "ArrowDown" && index < checkboxes.current.length - 1) { + checkboxes.current[index + 1]?.focus(); + + if (event.shiftKey) { + const oldItem = runs.at(index - 1); + const newItem = 
runs.at(index); + const itemsIds = [oldItem?.id, newItem?.id].filter(Boolean); + select(itemsIds); + } + } + }, + [checkboxes, runs] + ); return ( - Run + {allowSelection && ( + + {runs.length > 0 && ( + r.id))} + onChange={(element) => { + const ids = runs.map((r) => r.id); + const checked = element.currentTarget.checked; + if (checked) { + select(ids); + } else { + deselect(ids); + } + }} + ref={(r) => { + checkboxes.current[0] = r; + }} + onKeyDown={(event) => navigateCheckboxes(event, 0)} + /> + )} + + )} + Run # Task ID Version Env @@ -74,11 +131,27 @@ export function TaskRunsTable({ ) : runs.length === 0 ? ( ) : ( - runs.map((run) => { + runs.map((run, index) => { const path = v3RunSpanPath(organization, project, run, { spanId: run.spanId }); return ( - #{run.number} + {allowSelection && ( + + { + toggle(run.id); + }} + ref={(r) => { + checkboxes.current[index + 1] = r; + }} + onKeyDown={(event) => navigateCheckboxes(event, index + 1)} + /> + + )} + + {formatNumber(run.number)} + {run.taskIdentifier} {run.version ?? "–"} @@ -194,7 +267,7 @@ function BlankState({ isLoading, filters }: Pick env.id === filters.environments[0]); return ( - +
There are no runs for {filters.tasks[0]} @@ -235,8 +308,21 @@ function BlankState({ isLoading, filters }: Pick - + +
+ + No runs currently match your filters. Try refreshing or modifying your filters. + + +
); } diff --git a/apps/webapp/app/consts.ts b/apps/webapp/app/consts.ts index 187e31e0f4..30dba9c7f6 100644 --- a/apps/webapp/app/consts.ts +++ b/apps/webapp/app/consts.ts @@ -12,3 +12,4 @@ export const MAX_RUN_CHUNK_EXECUTION_LIMIT = 120000; // 2 minutes export const VERCEL_RESPONSE_TIMEOUT_STATUS_CODES = [408, 504]; export const MAX_BATCH_TRIGGER_ITEMS = 100; export const MAX_TASK_RUN_ATTEMPTS = 250; +export const BULK_ACTION_RUN_LIMIT = 250; diff --git a/apps/webapp/app/database-types.ts b/apps/webapp/app/database-types.ts new file mode 100644 index 0000000000..9afc5a5d49 --- /dev/null +++ b/apps/webapp/app/database-types.ts @@ -0,0 +1,68 @@ +// There's a weird issue with importing values from the prisma client +// when using Remix Vite + pnpm + prisma +// As long as they're only used as types it's ok +// Import types here and validate hardcoded enums + +import type { + BatchTaskRunItemStatus as BatchTaskRunItemStatusType, + TaskRunAttemptStatus as TaskRunAttemptStatusType, + TaskRunStatus as TaskRunStatusType, + JobRunStatus as JobRunStatusType, + RuntimeEnvironmentType as RuntimeEnvironmentTypeType, +} from "@trigger.dev/database"; + +export const BatchTaskRunItemStatus = { + PENDING: "PENDING", + FAILED: "FAILED", + CANCELED: "CANCELED", + COMPLETED: "COMPLETED", +} as const satisfies Record; + +export const TaskRunAttemptStatus = { + PENDING: "PENDING", + EXECUTING: "EXECUTING", + PAUSED: "PAUSED", + FAILED: "FAILED", + CANCELED: "CANCELED", + COMPLETED: "COMPLETED", +} as const satisfies Record; + +export const TaskRunStatus = { + PENDING: "PENDING", + WAITING_FOR_DEPLOY: "WAITING_FOR_DEPLOY", + EXECUTING: "EXECUTING", + WAITING_TO_RESUME: "WAITING_TO_RESUME", + RETRYING_AFTER_FAILURE: "RETRYING_AFTER_FAILURE", + PAUSED: "PAUSED", + CANCELED: "CANCELED", + INTERRUPTED: "INTERRUPTED", + COMPLETED_SUCCESSFULLY: "COMPLETED_SUCCESSFULLY", + COMPLETED_WITH_ERRORS: "COMPLETED_WITH_ERRORS", + SYSTEM_FAILURE: "SYSTEM_FAILURE", + CRASHED: "CRASHED", +} as const 
satisfies Record; + +export const JobRunStatus = { + PENDING: "PENDING", + QUEUED: "QUEUED", + WAITING_ON_CONNECTIONS: "WAITING_ON_CONNECTIONS", + PREPROCESSING: "PREPROCESSING", + STARTED: "STARTED", + EXECUTING: "EXECUTING", + WAITING_TO_CONTINUE: "WAITING_TO_CONTINUE", + WAITING_TO_EXECUTE: "WAITING_TO_EXECUTE", + SUCCESS: "SUCCESS", + FAILURE: "FAILURE", + TIMED_OUT: "TIMED_OUT", + ABORTED: "ABORTED", + CANCELED: "CANCELED", + UNRESOLVED_AUTH: "UNRESOLVED_AUTH", + INVALID_PAYLOAD: "INVALID_PAYLOAD", +} as const satisfies Record; + +export const RuntimeEnvironmentType = { + PRODUCTION: "PRODUCTION", + STAGING: "STAGING", + DEVELOPMENT: "DEVELOPMENT", + PREVIEW: "PREVIEW", +} as const satisfies Record; diff --git a/apps/webapp/app/entry.server.tsx b/apps/webapp/app/entry.server.tsx index 7893f01ab2..f19d8f83ad 100644 --- a/apps/webapp/app/entry.server.tsx +++ b/apps/webapp/app/entry.server.tsx @@ -174,6 +174,10 @@ Worker.init().catch((error) => { function logError(error: unknown, request?: Request) { console.error(error); + + if (error instanceof Error && error.message.startsWith("There are locked jobs present")) { + console.log("⚠️ graphile-worker migration issue detected!"); + } } const sqsEventConsumer = singleton("sqsEventConsumer", getSharedSqsEventConsumer); diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 4d53c4bb1d..80dfa3ee15 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -47,6 +47,8 @@ const EnvironmentSchema = z.object({ WORKER_SCHEMA: z.string().default("graphile_worker"), WORKER_CONCURRENCY: z.coerce.number().int().default(10), WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + /** The number of days a failed Graphile task should stay before getting cleaned up */ + WORKER_CLEANUP_TTL_DAYS: z.coerce.number().int().default(3), EXECUTION_WORKER_CONCURRENCY: z.coerce.number().int().default(10), EXECUTION_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), 
WORKER_ENABLED: z.string().default("true"), @@ -153,6 +155,15 @@ const EnvironmentSchema = z.object({ INTERNAL_OTEL_TRACE_SAMPLING_RATE: z.string().default("20"), INTERNAL_OTEL_TRACE_INSTRUMENT_PRISMA_ENABLED: z.string().default("0"), INTERNAL_OTEL_TRACE_DISABLED: z.string().default("0"), + + ORG_SLACK_INTEGRATION_CLIENT_ID: z.string().optional(), + ORG_SLACK_INTEGRATION_CLIENT_SECRET: z.string().optional(), + + /** These enable the alerts feature in v3 */ + ALERT_FROM_EMAIL: z.string().optional(), + ALERT_RESEND_API_KEY: z.string().optional(), + + MAX_SEQUENTIAL_INDEX_FAILURE_COUNT: z.coerce.number().default(96), }); export type Environment = z.infer; diff --git a/apps/webapp/app/features.server.ts b/apps/webapp/app/features.server.ts index 505fa470f4..a5f11e79b2 100644 --- a/apps/webapp/app/features.server.ts +++ b/apps/webapp/app/features.server.ts @@ -4,6 +4,7 @@ import { requestUrl } from "./utils/requestUrl.server"; export type TriggerFeatures = { isManagedCloud: boolean; v3Enabled: boolean; + alertsEnabled: boolean; }; // If the request host is cloud.trigger.dev then we are on the managed cloud @@ -20,5 +21,6 @@ export function featuresForRequest(request: Request): TriggerFeatures { return { isManagedCloud, v3Enabled: env.V3_ENABLED === "true", + alertsEnabled: env.ALERT_FROM_EMAIL !== undefined && env.ALERT_RESEND_API_KEY !== undefined, }; } diff --git a/apps/webapp/app/hooks/useFeatures.ts b/apps/webapp/app/hooks/useFeatures.ts index 6a726d74b1..9882f432d9 100644 --- a/apps/webapp/app/hooks/useFeatures.ts +++ b/apps/webapp/app/hooks/useFeatures.ts @@ -5,5 +5,5 @@ import type { TriggerFeatures } from "~/features.server"; export function useFeatures(): TriggerFeatures { const routeMatch = useTypedRouteLoaderData("root"); - return routeMatch?.features ?? { isManagedCloud: false, v3Enabled: false }; + return routeMatch?.features ?? 
{ isManagedCloud: false, v3Enabled: false, alertsEnabled: false }; } diff --git a/apps/webapp/app/hooks/useFilterTasks.ts b/apps/webapp/app/hooks/useFilterTasks.ts new file mode 100644 index 0000000000..6bb64a9d8b --- /dev/null +++ b/apps/webapp/app/hooks/useFilterTasks.ts @@ -0,0 +1,43 @@ +import { useTextFilter } from "./useTextFilter"; + +type Task = { + id: string; + friendlyId: string; + taskIdentifier: string; + exportName: string; + filePath: string; + triggerSource: string; +}; + +export function useFilterTasks({ tasks }: { tasks: T[] }) { + return useTextFilter({ + items: tasks, + filter: (task, text) => { + if (task.taskIdentifier.toLowerCase().includes(text.toLowerCase())) { + return true; + } + + if (task.exportName.toLowerCase().includes(text.toLowerCase())) { + return true; + } + + if (task.filePath.toLowerCase().includes(text.toLowerCase())) { + return true; + } + + if (task.id.toLowerCase().includes(text.toLowerCase())) { + return true; + } + + if (task.friendlyId.toLowerCase().includes(text.toLowerCase())) { + return true; + } + + if (task.triggerSource === "SCHEDULED" && "scheduled".includes(text.toLowerCase())) { + return true; + } + + return false; + }, + }); +} diff --git a/apps/webapp/app/hooks/useSearchParam.ts b/apps/webapp/app/hooks/useSearchParam.ts new file mode 100644 index 0000000000..982b51ce64 --- /dev/null +++ b/apps/webapp/app/hooks/useSearchParam.ts @@ -0,0 +1,76 @@ +import { useNavigate } from "@remix-run/react"; +import { useOptimisticLocation } from "./useOptimisticLocation"; +import { useCallback } from "react"; + +type Values = Record; + +export function useSearchParams() { + const navigate = useNavigate(); + const location = useOptimisticLocation(); + const search = new URLSearchParams(location.search); + + const set = useCallback( + (values: Values) => { + for (const [param, value] of Object.entries(values)) { + if (value === undefined) { + search.delete(param); + continue; + } + + if (typeof value === "string") { + 
search.set(param, value); + continue; + } + + search.delete(param); + for (const v of value) { + search.append(param, v); + } + } + }, + [location, search] + ); + + const replace = useCallback( + (values: Values) => { + set(values); + navigate(`${location.pathname}?${search.toString()}`, { replace: true }); + }, + [location, search] + ); + + const del = useCallback( + (keys: string | string[]) => { + if (!Array.isArray(keys)) { + keys = [keys]; + } + for (const key of keys) { + search.delete(key); + } + navigate(`${location.pathname}?${search.toString()}`, { replace: true }); + }, + [location, search] + ); + + const value = useCallback( + (param: string) => { + return search.get(param) ?? undefined; + }, + [location, search] + ); + + const values = useCallback( + (param: string) => { + return search.getAll(param); + }, + [location, search] + ); + + return { + value, + values, + set, + replace, + del, + }; +} diff --git a/apps/webapp/app/hooks/useShortcutKeys.tsx b/apps/webapp/app/hooks/useShortcutKeys.tsx index 092e163967..721dbeea18 100644 --- a/apps/webapp/app/hooks/useShortcutKeys.tsx +++ b/apps/webapp/app/hooks/useShortcutKeys.tsx @@ -17,16 +17,22 @@ export type ShortcutDefinition = | Shortcut; type useShortcutKeysProps = { - shortcut: ShortcutDefinition; + shortcut: ShortcutDefinition | undefined; action: (event: KeyboardEvent) => void; disabled?: boolean; enabledOnInputElements?: boolean; }; -export function useShortcutKeys({ shortcut, action, disabled = false }: useShortcutKeysProps) { +export function useShortcutKeys({ + shortcut, + action, + disabled = false, + enabledOnInputElements, +}: useShortcutKeysProps) { const { platform } = useOperatingSystem(); const isMac = platform === "mac"; - const relevantShortcut = "mac" in shortcut ? (isMac ? shortcut.mac : shortcut.windows) : shortcut; + const relevantShortcut = + shortcut && "mac" in shortcut ? (isMac ? 
shortcut.mac : shortcut.windows) : shortcut; const keys = createKeysFromShortcut(relevantShortcut); useHotkeys( @@ -36,13 +42,17 @@ export function useShortcutKeys({ shortcut, action, disabled = false }: useShort }, { enabled: !disabled, - enableOnFormTags: relevantShortcut.enabledOnInputElements, - enableOnContentEditable: relevantShortcut.enabledOnInputElements, + enableOnFormTags: enabledOnInputElements ?? relevantShortcut?.enabledOnInputElements, + enableOnContentEditable: enabledOnInputElements ?? relevantShortcut?.enabledOnInputElements, } ); } -function createKeysFromShortcut(shortcut: Shortcut) { +function createKeysFromShortcut(shortcut: Shortcut | undefined) { + if (!shortcut) { + return []; + } + const modifiers = shortcut.modifiers; const character = shortcut.key; diff --git a/apps/webapp/app/models/api-key.server.ts b/apps/webapp/app/models/api-key.server.ts index a54fdae9a1..0389782d8c 100644 --- a/apps/webapp/app/models/api-key.server.ts +++ b/apps/webapp/app/models/api-key.server.ts @@ -1,6 +1,7 @@ -import { RuntimeEnvironmentType, type RuntimeEnvironment } from "@trigger.dev/database"; +import type { RuntimeEnvironment } from "@trigger.dev/database"; import { prisma } from "~/db.server"; import { customAlphabet } from "nanoid"; +import { RuntimeEnvironmentType } from "~/database-types"; const apiKeyId = customAlphabet( "1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ", diff --git a/apps/webapp/app/models/orgIntegration.server.ts b/apps/webapp/app/models/orgIntegration.server.ts new file mode 100644 index 0000000000..613a2eeb51 --- /dev/null +++ b/apps/webapp/app/models/orgIntegration.server.ts @@ -0,0 +1,236 @@ +import { WebClient } from "@slack/web-api"; +import { + IntegrationService, + Organization, + OrganizationIntegration, + SecretReference, +} from "@trigger.dev/database"; +import { z } from "zod"; +import { $transaction, prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { logger } from 
"~/services/logger.server"; +import { getSecretStore } from "~/services/secrets/secretStore.server"; +import { commitSession, getUserSession } from "~/services/sessionStorage.server"; +import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; + +const SlackSecretSchema = z.object({ + botAccessToken: z.string(), + userAccessToken: z.string().optional(), + expiresIn: z.number().optional(), + refreshToken: z.string().optional(), + botScopes: z.array(z.string()).optional(), + userScopes: z.array(z.string()).optional(), + raw: z.record(z.any()).optional(), +}); + +type SlackSecret = z.infer; + +const REDIRECT_AFTER_AUTH_KEY = "redirect-back-after-auth"; + +export type OrganizationIntegrationForService = Omit< + AuthenticatableIntegration, + "service" +> & { + service: TService; +}; + +type AuthenticatedClientOptions = TService extends "SLACK" + ? { + forceBotToken?: boolean; + } + : undefined; + +type AuthenticatedClientForIntegration = + TService extends "SLACK" ? InstanceType : never; + +export type AuthenticatableIntegration = OrganizationIntegration & { + tokenReference: SecretReference; +}; + +export class OrgIntegrationRepository { + static async getAuthenticatedClientForIntegration( + integration: OrganizationIntegrationForService, + options?: AuthenticatedClientOptions + ): Promise> { + const secretStore = getSecretStore(integration.tokenReference.provider); + + switch (integration.service) { + case "SLACK": { + const secret = await secretStore.getSecret( + SlackSecretSchema, + integration.tokenReference.key + ); + + if (!secret) { + throw new Error("Failed to get access token"); + } + + // TODO refresh access token here + return new WebClient( + options?.forceBotToken + ? secret.botAccessToken + : secret.userAccessToken ?? 
secret.botAccessToken + ) as AuthenticatedClientForIntegration; + } + default: { + throw new Error(`Unsupported service ${integration.service}`); + } + } + } + + static isSlackSupported = + !!env.ORG_SLACK_INTEGRATION_CLIENT_ID && !!env.ORG_SLACK_INTEGRATION_CLIENT_SECRET; + + static slackAuthorizationUrl( + state: string, + scopes: string[] = [ + "channels:read", + "groups:read", + "im:read", + "mpim:read", + "chat:write", + "chat:write.public", + ], + userScopes: string[] = ["channels:read", "groups:read", "im:read", "mpim:read", "chat:write"] + ) { + return `https://slack.com/oauth/v2/authorize?client_id=${ + env.ORG_SLACK_INTEGRATION_CLIENT_ID + }&scope=${scopes.join(",")}&user_scope=${userScopes.join(",")}&state=${state}&redirect_uri=${ + env.APP_ORIGIN + }/integrations/slack/callback`; + } + + static async redirectToAuthService( + service: IntegrationService, + state: string, + request: Request, + redirectTo: string + ) { + const session = await getUserSession(request); + session.set(REDIRECT_AFTER_AUTH_KEY, redirectTo); + + const authUrl = service === "SLACK" ? 
this.slackAuthorizationUrl(state) : undefined; + + if (!authUrl) { + throw new Response("Unsupported service", { status: 400 }); + } + + logger.debug("Redirecting to auth service", { + service, + authUrl, + redirectTo, + }); + + return new Response(null, { + status: 302, + headers: { + location: authUrl, + "Set-Cookie": await commitSession(session), + }, + }); + } + + static async redirectAfterAuth(request: Request) { + const session = await getUserSession(request); + + logger.debug("Redirecting back after auth", { + sessionData: session.data, + }); + + const redirectTo = session.get(REDIRECT_AFTER_AUTH_KEY); + + if (!redirectTo) { + throw new Response("Invalid redirect", { status: 400 }); + } + + session.unset(REDIRECT_AFTER_AUTH_KEY); + + return new Response(null, { + status: 302, + headers: { + location: redirectTo, + "Set-Cookie": await commitSession(session), + }, + }); + } + + static async createOrgIntegration(serviceName: string, code: string, org: Organization) { + switch (serviceName) { + case "slack": { + if (!env.ORG_SLACK_INTEGRATION_CLIENT_ID || !env.ORG_SLACK_INTEGRATION_CLIENT_SECRET) { + throw new Error("Slack integration not configured"); + } + + const client = new WebClient(); + + const result = await client.oauth.v2.access({ + client_id: env.ORG_SLACK_INTEGRATION_CLIENT_ID, + client_secret: env.ORG_SLACK_INTEGRATION_CLIENT_SECRET, + code, + redirect_uri: `${env.APP_ORIGIN}/integrations/slack/callback`, + }); + + if (result.ok) { + logger.debug("Received slack access token", { + result, + }); + + if (!result.access_token) { + throw new Error("Failed to get access token"); + } + + return await $transaction(prisma, async (tx) => { + const secretStore = getSecretStore("DATABASE", { + prismaClient: tx, + }); + + const integrationFriendlyId = generateFriendlyId("org_integration"); + + const secretValue: SlackSecret = { + botAccessToken: result.access_token!, + userAccessToken: result.authed_user ? 
result.authed_user.access_token : undefined, + expiresIn: result.expires_in, + refreshToken: result.refresh_token, + botScopes: result.scope ? result.scope.split(",") : [], + userScopes: result.authed_user?.scope ? result.authed_user.scope.split(",") : [], + raw: result, + }; + + logger.debug("Setting secret", { + secretValue, + }); + + await secretStore.setSecret(integrationFriendlyId, secretValue); + + const reference = await tx.secretReference.create({ + data: { + provider: "DATABASE", + key: integrationFriendlyId, + }, + }); + + return await tx.organizationIntegration.create({ + data: { + friendlyId: integrationFriendlyId, + organizationId: org.id, + service: "SLACK", + tokenReferenceId: reference.id, + integrationData: { + team: result.team, + user: result.authed_user + ? { + id: result.authed_user.id, + } + : undefined, + } as any, + }, + }); + }); + } + } + default: { + throw new Error(`Service ${serviceName} not supported`); + } + } + } +} diff --git a/apps/webapp/app/models/projectAlert.server.ts b/apps/webapp/app/models/projectAlert.server.ts new file mode 100644 index 0000000000..cd3af4004f --- /dev/null +++ b/apps/webapp/app/models/projectAlert.server.ts @@ -0,0 +1,33 @@ +import { z } from "zod"; +import { EncryptedSecretValueSchema } from "~/services/secrets/secretStore.server"; + +export const ProjectAlertWebhookProperties = z.object({ + secret: EncryptedSecretValueSchema, + url: z.string(), +}); + +export type ProjectAlertWebhookProperties = z.infer; + +export const ProjectAlertEmailProperties = z.object({ + email: z.string(), +}); + +export type ProjectAlertEmailProperties = z.infer; + +export const DeleteProjectAlertChannel = z.object({ + id: z.string(), +}); + +export const ProjectAlertSlackProperties = z.object({ + channelId: z.string(), + channelName: z.string(), + integrationId: z.string().nullish(), +}); + +export type ProjectAlertSlackProperties = z.infer; + +export const ProjectAlertSlackStorage = z.object({ + message_ts: z.string(), +}); + 
+export type ProjectAlertSlackStorage = z.infer; diff --git a/apps/webapp/app/models/taskRun.server.ts b/apps/webapp/app/models/taskRun.server.ts index 0e2eccbf18..7922c1aed7 100644 --- a/apps/webapp/app/models/taskRun.server.ts +++ b/apps/webapp/app/models/taskRun.server.ts @@ -1,17 +1,20 @@ -import { - TaskRunError, +import type { TaskRunExecutionResult, TaskRunFailedExecutionResult, TaskRunSuccessfulExecutionResult, } from "@trigger.dev/core/v3"; -import { - BatchTaskRunItemStatus, +import { TaskRunError } from "@trigger.dev/core/v3"; + +import type { TaskRun, TaskRunAttempt, - TaskRunAttemptStatus, - TaskRunStatus, + TaskRunAttemptStatus as TaskRunAttemptStatusType, + TaskRunStatus as TaskRunStatusType, + BatchTaskRunItemStatus as BatchTaskRunItemStatusType, } from "@trigger.dev/database"; + import { assertNever } from "assert-never"; +import { BatchTaskRunItemStatus, TaskRunAttemptStatus, TaskRunStatus } from "~/database-types"; import { logger } from "~/services/logger.server"; const SUCCESSFUL_STATUSES = [TaskRunStatus.COMPLETED_SUCCESSFULLY]; @@ -104,7 +107,9 @@ export function executionResultForTaskRun( } } -export function batchTaskRunItemStatusForRunStatus(status: TaskRunStatus): BatchTaskRunItemStatus { +export function batchTaskRunItemStatusForRunStatus( + status: TaskRunStatusType +): BatchTaskRunItemStatusType { switch (status) { case TaskRunStatus.COMPLETED_SUCCESSFULLY: return BatchTaskRunItemStatus.COMPLETED; @@ -113,7 +118,6 @@ export function batchTaskRunItemStatusForRunStatus(status: TaskRunStatus): Batch case TaskRunStatus.COMPLETED_WITH_ERRORS: case TaskRunStatus.SYSTEM_FAILURE: case TaskRunStatus.CRASHED: - case TaskRunStatus.COMPLETED_WITH_ERRORS: return BatchTaskRunItemStatus.FAILED; case TaskRunStatus.PENDING: case TaskRunStatus.WAITING_FOR_DEPLOY: diff --git a/apps/webapp/app/platform/zodWorker.server.ts b/apps/webapp/app/platform/zodWorker.server.ts index 6c94d41873..6bf9a6a64a 100644 --- a/apps/webapp/app/platform/zodWorker.server.ts 
+++ b/apps/webapp/app/platform/zodWorker.server.ts @@ -1,20 +1,21 @@ import type { CronItem, CronItemOptions, - Job as GraphileJob, + DbJob as GraphileJob, Runner as GraphileRunner, JobHelpers, RunnerOptions, Task, TaskList, TaskSpec, + WorkerUtils, } from "graphile-worker"; -import { run as graphileRun, parseCronItems } from "graphile-worker"; +import { run as graphileRun, makeWorkerUtils, parseCronItems } from "graphile-worker"; import { SpanKind, trace } from "@opentelemetry/api"; import omit from "lodash.omit"; import { z } from "zod"; -import { PrismaClient, PrismaClientOrTransaction } from "~/db.server"; +import { $replica, PrismaClient, PrismaClientOrTransaction } from "~/db.server"; import { PgListenService } from "~/services/db/pgListen.server"; import { workerLogger as logger } from "~/services/logger.server"; import { flattenAttributes } from "@trigger.dev/core/v3"; @@ -34,8 +35,8 @@ const RawCronPayloadSchema = z.object({ const GraphileJobSchema = z.object({ id: z.coerce.string(), - queue_name: z.string().nullable(), - task_identifier: z.string(), + job_queue_id: z.number().nullable(), + task_id: z.number(), payload: z.unknown(), priority: z.number(), run_at: z.coerce.date(), @@ -72,7 +73,7 @@ type RecurringTaskPayload = { export type ZodRecurringTasks = { [key: string]: { - pattern: string; + match: string; options?: CronItemOptions; handler: (payload: RecurringTaskPayload, job: GraphileJob) => Promise; }; @@ -129,6 +130,7 @@ export class ZodWorker { #rateLimiter?: ZodWorkerRateLimiter; #shutdownTimeoutInMs?: number; #shuttingDown = false; + #workerUtils?: WorkerUtils; constructor(options: ZodWorkerOptions) { this.#name = options.name; @@ -158,6 +160,8 @@ export class ZodWorker { const parsedCronItems = parseCronItems(this.#createCronItemsFromRecurringTasks()); + this.#workerUtils = await makeWorkerUtils(this.#runnerOptions); + this.#runner = await graphileRun({ ...this.#runnerOptions, noHandleSignals: true, @@ -188,7 +192,7 @@ export class ZodWorker { 
this.#logDebug("Detected incoming migration", { latestMigration }); if (latestMigration > 10) { - // already migrated past v0.14 - nothing to do + this.#logDebug("Already migrated past v0.14 - nothing to do", { latestMigration }); return; } @@ -263,6 +267,7 @@ export class ZodWorker { public async stop() { await this.#runner?.stop(); + await this.#workerUtils?.release(); } public async enqueue( @@ -442,12 +447,29 @@ export class ZodWorker { return taskList; } + async #getQueueName(queueId: number | null) { + if (queueId === null) { + return; + } + + const schema = z.array(z.object({ queue_name: z.string() })); + + const rawQueueNameResults = await $replica.$queryRawUnsafe( + `SELECT queue_name FROM ${this.graphileWorkerSchema}._private_job_queues WHERE id = $1`, + queueId + ); + + const queueNameResults = schema.parse(rawQueueNameResults); + + return queueNameResults[0]?.queue_name; + } + async #rescheduleTask(payload: unknown, helpers: JobHelpers) { this.#logDebug("Rescheduling task", { payload, job: helpers.job }); await this.enqueue(helpers.job.task_identifier, payload, { - runAt: helpers.job.run_at, - queueName: helpers.job.queue_name ?? undefined, + runAt: new Date(Date.now() + 1000 * 10), + queueName: await this.#getQueueName(helpers.job.job_queue_id), priority: helpers.job.priority, jobKey: helpers.job.key ?? undefined, flags: Object.keys(helpers.job.flags ?? 
[]), @@ -460,7 +482,7 @@ export class ZodWorker { if (this.#cleanup) { cronItems.push({ - pattern: this.#cleanup.frequencyExpression, + match: this.#cleanup.frequencyExpression, identifier: CLEANUP_TASK_NAME, task: CLEANUP_TASK_NAME, options: this.#cleanup.taskOptions, @@ -469,7 +491,7 @@ export class ZodWorker { if (this.#reporter) { cronItems.push({ - pattern: "50 * * * *", // Every hour at 50 minutes past the hour + match: "50 * * * *", // Every hour at 50 minutes past the hour identifier: REPORTER_TASK_NAME, task: REPORTER_TASK_NAME, }); @@ -481,7 +503,7 @@ export class ZodWorker { for (const [key, task] of Object.entries(this.#recurringTasks)) { const cronItem: CronItem = { - pattern: task.pattern, + match: task.match, identifier: key, task: key, options: task.options, @@ -529,7 +551,7 @@ export class ZodWorker { attributes: { "job.task_identifier": job.task_identifier, "job.id": job.id, - ...(job.queue_name ? { "job.queue_name": job.queue_name } : {}), + ...(job.job_queue_id ? { "job.queue_id": job.job_queue_id } : {}), ...flattenAttributes(job.payload as Record, "job.payload"), "job.priority": job.priority, "job.run_at": job.run_at.toISOString(), @@ -555,6 +577,17 @@ export class ZodWorker { span.recordException(new Error(String(error))); } + if (job.attempts >= job.max_attempts) { + logger.debug("Job failed after max attempts", { + job, + attempts: job.attempts, + max_attempts: job.max_attempts, + error: error instanceof Error ? error.message : error, + }); + + return; + } + throw error; } finally { span.end(); @@ -599,7 +632,7 @@ export class ZodWorker { attributes: { "job.task_identifier": job.task_identifier, "job.id": job.id, - ...(job.queue_name ? { "job.queue_name": job.queue_name } : {}), + ...(job.job_queue_id ? 
{ "job.queue_id": job.job_queue_id } : {}), ...flattenAttributes(job.payload as Record, "job.payload"), "job.priority": job.priority, "job.run_at": job.run_at.toISOString(), @@ -638,6 +671,10 @@ export class ZodWorker { return; } + if (!this.#workerUtils) { + throw new Error("WorkerUtils need to be initialized before running job cleanup."); + } + const job = helpers.job; logger.debug("Received cleanup task", { @@ -663,23 +700,38 @@ export class ZodWorker { payload, }); - const rawResults = await this.#prisma.$queryRawUnsafe( - `WITH rows AS (SELECT id FROM ${this.graphileWorkerSchema}.jobs WHERE run_at < $1 AND locked_at IS NULL AND max_attempts = attempts LIMIT $2 FOR UPDATE) DELETE FROM ${this.graphileWorkerSchema}.jobs WHERE id IN (SELECT id FROM rows) RETURNING id`, + const rawResults = await $replica.$queryRawUnsafe( + `SELECT id + FROM ${this.graphileWorkerSchema}.jobs + WHERE run_at < $1 + AND locked_at IS NULL + AND max_attempts = attempts + LIMIT $2`, expirationDate, this.#cleanup.maxCount ); - const results = Array.isArray(rawResults) ? 
rawResults : []; + const results = z + .array( + z.object({ + id: z.coerce.string(), + }) + ) + .parse(rawResults); + + const completedJobs = await this.#workerUtils.completeJobs(results.map((job) => job.id)); logger.debug("Cleaned up old jobs", { - count: results.length, + found: results.length, + deleted: completedJobs.length, expirationDate, payload, }); if (this.#reporter) { await this.#reporter("cleanup_stats", { - count: results.length, + found: results.length, + deleted: completedJobs.length, expirationDate, ts: payload._cron.ts, }); @@ -711,7 +763,7 @@ export class ZodWorker { const schema = z.array(z.object({ count: z.coerce.number() })); // Count the number of jobs that have been added since the startAt date and before the payload._cron.ts date - const rawAddedResults = await this.#prisma.$queryRawUnsafe( + const rawAddedResults = await $replica.$queryRawUnsafe( `SELECT COUNT(*) FROM ${this.graphileWorkerSchema}.jobs WHERE created_at > $1 AND created_at < $2`, startAt, payload._cron.ts @@ -720,7 +772,7 @@ export class ZodWorker { const addedCountResults = schema.parse(rawAddedResults)[0]; // Count the total number of jobs in the jobs table - const rawTotalResults = await this.#prisma.$queryRawUnsafe( + const rawTotalResults = await $replica.$queryRawUnsafe( `SELECT COUNT(*) FROM ${this.graphileWorkerSchema}.jobs` ); diff --git a/apps/webapp/app/presenters/EnvironmentsPresenter.server.ts b/apps/webapp/app/presenters/EnvironmentsPresenter.server.ts index ff0600dcd9..50a22425e8 100644 --- a/apps/webapp/app/presenters/EnvironmentsPresenter.server.ts +++ b/apps/webapp/app/presenters/EnvironmentsPresenter.server.ts @@ -14,7 +14,7 @@ import { IndexEndpointStats, parseEndpointIndexStats, } from "@trigger.dev/core"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from "~/utils/environmentSort"; export type Client = { slug: string; diff --git a/apps/webapp/app/presenters/HttpEndpointPresenter.server.ts 
b/apps/webapp/app/presenters/HttpEndpointPresenter.server.ts index 886b4192d8..fe791481ee 100644 --- a/apps/webapp/app/presenters/HttpEndpointPresenter.server.ts +++ b/apps/webapp/app/presenters/HttpEndpointPresenter.server.ts @@ -1,6 +1,6 @@ import { z } from "zod"; import { PrismaClient, prisma } from "~/db.server"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from "~/utils/environmentSort"; import { httpEndpointUrl } from "~/services/httpendpoint/HandleHttpEndpointService"; import { getSecretStore } from "~/services/secrets/secretStore.server"; import { projectPath } from "~/utils/pathBuilder"; diff --git a/apps/webapp/app/presenters/ProjectPresenter.server.ts b/apps/webapp/app/presenters/ProjectPresenter.server.ts index 7f61825e16..0ca954d8bf 100644 --- a/apps/webapp/app/presenters/ProjectPresenter.server.ts +++ b/apps/webapp/app/presenters/ProjectPresenter.server.ts @@ -2,7 +2,7 @@ import { PrismaClient, prisma } from "~/db.server"; import { Project } from "~/models/project.server"; import { displayableEnvironments } from "~/models/runtimeEnvironment.server"; import { User } from "~/models/user.server"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from "~/utils/environmentSort"; export class ProjectPresenter { #prismaClient: PrismaClient; diff --git a/apps/webapp/app/presenters/v3/AlertChannelListPresenter.server.ts b/apps/webapp/app/presenters/v3/AlertChannelListPresenter.server.ts new file mode 100644 index 0000000000..01d28b9503 --- /dev/null +++ b/apps/webapp/app/presenters/v3/AlertChannelListPresenter.server.ts @@ -0,0 +1,73 @@ +import { logger } from "~/services/logger.server"; +import { BasePresenter } from "./basePresenter.server"; +import { ProjectAlertChannel } from "@trigger.dev/database"; +import { decryptSecret } from "~/services/secrets/secretStore.server"; +import { env } from "~/env.server"; +import { + ProjectAlertEmailProperties, + 
ProjectAlertSlackProperties, + ProjectAlertWebhookProperties, +} from "~/models/projectAlert.server"; + +export type AlertChannelListPresenterData = Awaited>; +export type AlertChannelListPresenterRecord = + AlertChannelListPresenterData["alertChannels"][number]; +export type AlertChannelListPresenterAlertProperties = NonNullable< + AlertChannelListPresenterRecord["properties"] +>; + +export class AlertChannelListPresenter extends BasePresenter { + public async call(projectId: string) { + logger.debug("AlertChannelListPresenter", { projectId }); + + const alertChannels = await this._prisma.projectAlertChannel.findMany({ + where: { + projectId, + }, + orderBy: { + createdAt: "desc", + }, + }); + + return { + alertChannels: await Promise.all( + alertChannels.map(async (alertChannel) => ({ + ...alertChannel, + properties: await this.#presentProperties(alertChannel), + })) + ), + }; + } + + async #presentProperties(alertChannel: ProjectAlertChannel) { + if (!alertChannel.properties) { + return; + } + + switch (alertChannel.type) { + case "WEBHOOK": + const parsedProperties = ProjectAlertWebhookProperties.parse(alertChannel.properties); + + const secret = await decryptSecret(env.ENCRYPTION_KEY, parsedProperties.secret); + + return { + type: "WEBHOOK" as const, + url: parsedProperties.url, + secret, + }; + case "EMAIL": + return { + type: "EMAIL" as const, + ...ProjectAlertEmailProperties.parse(alertChannel.properties), + }; + case "SLACK": { + return { + type: "SLACK" as const, + ...ProjectAlertSlackProperties.parse(alertChannel.properties), + }; + } + default: + throw new Error(`Unsupported alert channel type: ${alertChannel.type}`); + } + } +} diff --git a/apps/webapp/app/presenters/v3/ApiAlertChannelPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiAlertChannelPresenter.server.ts new file mode 100644 index 0000000000..c6dd350f5f --- /dev/null +++ b/apps/webapp/app/presenters/v3/ApiAlertChannelPresenter.server.ts @@ -0,0 +1,136 @@ +import { + ProjectAlertChannel, 
+ ProjectAlertChannelType, + ProjectAlertType, +} from "@trigger.dev/database"; +import assertNever from "assert-never"; +import { z } from "zod"; +import { env } from "~/env.server"; +import { + ProjectAlertEmailProperties, + ProjectAlertWebhookProperties, +} from "~/models/projectAlert.server"; +import { decryptSecret } from "~/services/secrets/secretStore.server"; + +export const ApiAlertType = z.enum(["attempt_failure", "deployment_failure", "deployment_success"]); + +export type ApiAlertType = z.infer; + +export const ApiAlertEnvironmentType = z.enum(["STAGING", "PRODUCTION"]); + +export type ApiAlertEnvironmentType = z.infer; + +export const ApiAlertChannel = z.enum(["email", "webhook"]); + +export type ApiAlertChannel = z.infer; + +export const ApiAlertChannelData = z.object({ + email: z.string().optional(), + url: z.string().optional(), + secret: z.string().optional(), +}); + +export type ApiAlertChannelData = z.infer; + +export const ApiCreateAlertChannel = z.object({ + alertTypes: ApiAlertType.array(), + name: z.string(), + channel: ApiAlertChannel, + channelData: ApiAlertChannelData, + deduplicationKey: z.string().optional(), + environmentTypes: ApiAlertEnvironmentType.array().default(["STAGING", "PRODUCTION"]), +}); + +export type ApiCreateAlertChannel = z.infer; + +export const ApiAlertChannelObject = z.object({ + id: z.string(), + name: z.string(), + alertTypes: ApiAlertType.array(), + channel: ApiAlertChannel, + channelData: ApiAlertChannelData, + deduplicationKey: z.string().optional(), +}); + +export type ApiAlertChannelObject = z.infer; + +export class ApiAlertChannelPresenter { + public static async alertChannelToApi( + alertChannel: ProjectAlertChannel + ): Promise { + return { + id: alertChannel.friendlyId, + name: alertChannel.name, + alertTypes: alertChannel.alertTypes.map((type) => this.alertTypeToApi(type)), + channel: this.alertChannelTypeToApi(alertChannel.type), + channelData: await channelDataFromProperties(alertChannel.type, 
alertChannel.properties), + deduplicationKey: alertChannel.userProvidedDeduplicationKey + ? alertChannel.deduplicationKey + : undefined, + }; + } + + public static alertTypeToApi(alertType: ProjectAlertType): ApiAlertType { + switch (alertType) { + case "TASK_RUN_ATTEMPT": + return "attempt_failure"; + case "DEPLOYMENT_FAILURE": + return "deployment_failure"; + case "DEPLOYMENT_SUCCESS": + return "deployment_success"; + default: + assertNever(alertType); + } + } + + public static alertTypeFromApi(alertType: ApiAlertType): ProjectAlertType { + switch (alertType) { + case "attempt_failure": + return "TASK_RUN_ATTEMPT"; + case "deployment_failure": + return "DEPLOYMENT_FAILURE"; + case "deployment_success": + return "DEPLOYMENT_SUCCESS"; + default: + assertNever(alertType); + } + } + + public static alertChannelTypeToApi(type: ProjectAlertChannelType): ApiAlertChannel { + switch (type) { + case "EMAIL": + return "email"; + case "WEBHOOK": + return "webhook"; + case "SLACK": + throw new Error("Slack channels are not supported"); + default: + assertNever(type); + } + } +} + +async function channelDataFromProperties( + type: ProjectAlertChannelType, + properties: ProjectAlertChannel["properties"] +): Promise { + if (!properties) { + return {}; + } + + switch (type) { + case "EMAIL": + return ProjectAlertEmailProperties.parse(properties); + case "WEBHOOK": + const { url, secret } = ProjectAlertWebhookProperties.parse(properties); + + return { + url, + secret: await decryptSecret(env.ENCRYPTION_KEY, secret), + }; + case "SLACK": + throw new Error("Slack channels are not supported"); + default: + assertNever(type); + } +} diff --git a/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts index 406bb13b23..eeb7b9f2e4 100644 --- a/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts @@ -1,7 +1,7 @@ import { PrismaClient, prisma } from "~/db.server"; 
import { Project } from "~/models/project.server"; import { User } from "~/models/user.server"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from "~/utils/environmentSort"; export class ApiKeysPresenter { #prismaClient: PrismaClient; diff --git a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts index dcf9cc287a..a920bdd1ff 100644 --- a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts @@ -144,18 +144,18 @@ export class DeploymentPresenter { userName: getUsername(deployment.environment.orgMember?.user), }, deployedBy: deployment.triggeredBy, - errorData: this.#prepareErrorData(deployment.errorData), sdkVersion: deployment.worker?.sdkVersion, imageReference: deployment.imageReference, externalBuildData: externalBuildData && externalBuildData.success ? externalBuildData.data : undefined, projectId: deployment.projectId, organizationId: project.organizationId, + errorData: DeploymentPresenter.prepareErrorData(deployment.errorData), }, }; } - #prepareErrorData(errorData: WorkerDeployment["errorData"]): ErrorData | undefined { + public static prepareErrorData(errorData: WorkerDeployment["errorData"]): ErrorData | undefined { if (!errorData) { return; } diff --git a/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts b/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts index cff76a3979..bc200dc101 100644 --- a/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts @@ -1,7 +1,7 @@ import { PrismaClient, prisma } from "~/db.server"; import { Project } from "~/models/project.server"; import { User } from "~/models/user.server"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from 
"~/utils/environmentSort"; import { EnvironmentVariablesRepository } from "~/v3/environmentVariables/environmentVariablesRepository.server"; type Result = Awaited>; diff --git a/apps/webapp/app/presenters/v3/NewAlertChannelPresenter.server.ts b/apps/webapp/app/presenters/v3/NewAlertChannelPresenter.server.ts new file mode 100644 index 0000000000..24688d8b78 --- /dev/null +++ b/apps/webapp/app/presenters/v3/NewAlertChannelPresenter.server.ts @@ -0,0 +1,104 @@ +import { + AuthenticatableIntegration, + OrgIntegrationRepository, +} from "~/models/orgIntegration.server"; +import { logger } from "~/services/logger.server"; +import { BasePresenter } from "./basePresenter.server"; +import { WebClient } from "@slack/web-api"; + +export class NewAlertChannelPresenter extends BasePresenter { + public async call(projectId: string) { + const project = await this._prisma.project.findUniqueOrThrow({ + where: { + id: projectId, + }, + }); + + // Find the latest Slack integration + const slackIntegration = await this._prisma.organizationIntegration.findFirst({ + where: { + service: "SLACK", + organizationId: project.organizationId, + }, + orderBy: { + createdAt: "desc", + }, + include: { + tokenReference: true, + }, + }); + + // If there is a slack integration, then we need to get a list of Slack Channels + if (slackIntegration) { + const channels = await getSlackChannelsForToken(slackIntegration); + + return { + slack: { + status: "READY" as const, + channels, + integrationId: slackIntegration.id, + }, + }; + } else { + if (OrgIntegrationRepository.isSlackSupported) { + return { + slack: { + status: "NOT_CONFIGURED" as const, + }, + }; + } else { + return { + slack: { + status: "NOT_AVAILABLE" as const, + }, + }; + } + } + } +} + +async function getSlackChannelsForToken(integration: AuthenticatableIntegration) { + const client = await OrgIntegrationRepository.getAuthenticatedClientForIntegration(integration); + + const channels = await getAllSlackConversations(client); + + 
logger.debug("Received a list of slack conversations", { + channels, + }); + + return (channels ?? []) + .filter((channel) => !channel.is_archived) + .filter((channel) => channel.is_channel) + .filter((channel) => !channel.is_ext_shared) + .filter((channel) => channel.unlinked === 0) + .filter((channel) => channel.num_members) + .sort((a, b) => a.name!.localeCompare(b.name!)); +} + +type Channels = Awaited>["channels"]; + +async function getSlackConversationsPage(client: WebClient, nextCursor?: string) { + return client.conversations.list({ + types: "public_channel,private_channel", + exclude_archived: true, + cursor: nextCursor, + }); +} + +async function getAllSlackConversations(client: WebClient) { + let nextCursor: string | undefined = undefined; + let channels: Channels = []; + + do { + const response = await getSlackConversationsPage(client, nextCursor); + + if (!response.ok) { + throw new Error(`Failed to get channels: ${response.error}`); + } + + channels = channels.concat(response.channels ?? 
[]); + nextCursor = response.response_metadata?.next_cursor; + } while (nextCursor); + + return channels; +} diff --git a/apps/webapp/app/presenters/v3/RunListPresenter.server.ts b/apps/webapp/app/presenters/v3/RunListPresenter.server.ts index 019f48f0c8..a020b8f7fd 100644 --- a/apps/webapp/app/presenters/v3/RunListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/RunListPresenter.server.ts @@ -1,4 +1,5 @@ import { Prisma, TaskRunStatus } from "@trigger.dev/database"; +import parse from "parse-duration"; import { Direction } from "~/components/runs/RunStatuses"; import { FINISHED_STATUSES } from "~/components/runs/v3/TaskRunStatus"; import { sqlDatabaseSchema } from "~/db.server"; @@ -15,6 +16,8 @@ type RunListOptions = { statuses?: TaskRunStatus[]; environments?: string[]; scheduleId?: string; + period?: string; + bulkId?: string; from?: number; to?: number; //pagination @@ -23,7 +26,7 @@ type RunListOptions = { pageSize?: number; }; -const DEFAULT_PAGE_SIZE = 20; +const DEFAULT_PAGE_SIZE = 25; export type RunList = Awaited>; export type RunListItem = RunList["runs"][0]; @@ -38,6 +41,8 @@ export class RunListPresenter extends BasePresenter { statuses, environments, scheduleId, + period, + bulkId, from, to, direction = "forward", @@ -47,10 +52,12 @@ export class RunListPresenter extends BasePresenter { const hasStatusFilters = statuses && statuses.length > 0; const hasFilters = - tasks !== undefined || - versions !== undefined || + (tasks !== undefined && tasks.length > 0) || + (versions !== undefined && versions.length > 0) || hasStatusFilters || - environments !== undefined || + (environments !== undefined && environments.length > 0) || + (period !== undefined && period !== "all") || + (bulkId !== undefined && bulkId !== "") || from !== undefined || to !== undefined; @@ -83,13 +90,57 @@ export class RunListPresenter extends BasePresenter { }); //get all possible tasks - const possibleTasks = await this._replica.backgroundWorkerTask.findMany({ + const 
possibleTasksAsync = this._replica.backgroundWorkerTask.findMany({ distinct: ["slug"], where: { projectId: project.id, }, }); + //get possible bulk actions + const bulkActionsAsync = this._replica.bulkActionGroup.findMany({ + select: { + friendlyId: true, + type: true, + createdAt: true, + }, + where: { + projectId: project.id, + }, + orderBy: { + createdAt: "desc", + }, + take: 20, + }); + + const [possibleTasks, bulkActions] = await Promise.all([possibleTasksAsync, bulkActionsAsync]); + + //we can restrict to specific runs using bulkId, or batchId + let restrictToRunIds: undefined | string[] = undefined; + + //bulk id + if (bulkId) { + const bulkAction = await this._replica.bulkActionGroup.findUnique({ + select: { + items: { + select: { + destinationRunId: true, + }, + }, + }, + where: { + friendlyId: bulkId, + }, + }); + + if (bulkAction) { + const runIds = bulkAction.items.map((item) => item.destinationRunId).filter(Boolean); + restrictToRunIds = runIds; + } + } + + const periodMs = period ? parse(period) : undefined; + //get the runs let runs = await this._replica.$queryRaw< { @@ -136,6 +187,13 @@ export class RunListPresenter extends BasePresenter { : Prisma.empty } -- filters + ${ + restrictToRunIds + ? restrictToRunIds.length === 0 + ? Prisma.sql`AND tr.id = ''` + : Prisma.sql`AND tr.id IN (${Prisma.join(restrictToRunIds)})` + : Prisma.empty + } ${ tasks && tasks.length > 0 ? Prisma.sql`AND tr."taskIdentifier" IN (${Prisma.join(tasks)})` @@ -152,6 +210,11 @@ export class RunListPresenter extends BasePresenter { : Prisma.empty } ${scheduleId ? Prisma.sql`AND tr."scheduleId" = ${scheduleId}` : Prisma.empty} + ${ + periodMs + ? Prisma.sql`AND tr."createdAt" >= NOW() - INTERVAL '1 millisecond' * ${periodMs}` + : Prisma.empty + } ${ from ? 
Prisma.sql`AND tr."createdAt" >= ${new Date(from).toISOString()}::timestamp` @@ -224,7 +287,16 @@ export class RunListPresenter extends BasePresenter { next, previous, }, - possibleTasks: possibleTasks.map((task) => task.slug), + possibleTasks: possibleTasks + .map((task) => ({ slug: task.slug, triggerSource: task.triggerSource })) + .sort((a, b) => { + return a.slug.localeCompare(b.slug); + }), + bulkActions: bulkActions.map((bulkAction) => ({ + id: bulkAction.friendlyId, + type: bulkAction.type, + createdAt: bulkAction.createdAt, + })), filters: { tasks: tasks || [], versions: versions || [], diff --git a/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts b/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts index e7ac50573a..c6a26f9702 100644 --- a/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts @@ -1,18 +1,19 @@ -import { - Prisma, +import type { RuntimeEnvironmentType, - TaskRunStatus, TaskTriggerSource, + TaskRunStatus as TaskRunStatusType, } from "@trigger.dev/database"; +import { Prisma } from "@trigger.dev/database"; import { QUEUED_STATUSES, RUNNING_STATUSES } from "~/components/runs/v3/TaskRunStatus"; import { sqlDatabaseSchema } from "~/db.server"; -import { Organization } from "~/models/organization.server"; -import { Project } from "~/models/project.server"; +import type { Organization } from "~/models/organization.server"; +import type { Project } from "~/models/project.server"; import { displayableEnvironments } from "~/models/runtimeEnvironment.server"; -import { User } from "~/models/user.server"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import type { User } from "~/models/user.server"; +import { sortEnvironments } from "~/utils/environmentSort"; import { logger } from "~/services/logger.server"; import { BasePresenter } from "./basePresenter.server"; +import { TaskRunStatus } from "~/database-types"; export type Task = { slug: 
string; @@ -150,7 +151,7 @@ export class TaskListPresenter extends BasePresenter { const activity = await this._replica.$queryRaw< { taskIdentifier: string; - status: TaskRunStatus; + status: TaskRunStatusType; day: Date; count: BigInt; }[] @@ -193,7 +194,7 @@ export class TaskListPresenter extends BasePresenter { existingTask.push({ day: day.toISOString(), [TaskRunStatus.COMPLETED_SUCCESSFULLY]: 0, - } as { day: string } & Record); + } as { day: string } & Record); } acc[a.taskIdentifier] = existingTask; @@ -214,7 +215,7 @@ export class TaskListPresenter extends BasePresenter { day[a.status] = Number(a.count); return acc; - }, {} as Record)[]>); + }, {} as Record)[]>); } async #getRunningStats(tasks: string[], projectId: string) { @@ -225,7 +226,7 @@ export class TaskListPresenter extends BasePresenter { const statuses = await this._replica.$queryRaw< { taskIdentifier: string; - status: TaskRunStatus; + status: TaskRunStatusType; count: BigInt; }[] >` diff --git a/apps/webapp/app/presenters/v3/TestPresenter.server.ts b/apps/webapp/app/presenters/v3/TestPresenter.server.ts index 130935b2be..cbe2e2456b 100644 --- a/apps/webapp/app/presenters/v3/TestPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/TestPresenter.server.ts @@ -1,7 +1,7 @@ import { TaskTriggerSource } from "@trigger.dev/database"; import { sqlDatabaseSchema, PrismaClient, prisma } from "~/db.server"; import { TestSearchParams } from "~/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.test/route"; -import { sortEnvironments } from "~/services/environmentSort.server"; +import { sortEnvironments } from "~/utils/environmentSort"; import { createSearchParams } from "~/utils/searchParams"; import { getUsername } from "~/utils/username"; diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.environments/FirstEndpointSheet.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.environments/FirstEndpointSheet.tsx index 
1b741e399c..45dc829097 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.environments/FirstEndpointSheet.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.environments/FirstEndpointSheet.tsx @@ -27,7 +27,7 @@ import { SelectItem, SelectTrigger, SelectValue, -} from "~/components/primitives/Select"; +} from "~/components/primitives/SimpleSelect"; import { EnvironmentLabel } from "~/components/environments/EnvironmentLabel"; type FirstEndpointSheetProps = { diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.jobs.$jobParam.test/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.jobs.$jobParam.test/route.tsx index b2cff83f72..a4e2902f80 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.jobs.$jobParam.test/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.jobs.$jobParam.test/route.tsx @@ -25,7 +25,7 @@ import { SelectItem, SelectTrigger, SelectValue, -} from "~/components/primitives/Select"; +} from "~/components/primitives/SimpleSelect"; import { TextLink } from "~/components/primitives/TextLink"; import { runStatusClassNameColor, runStatusTitle } from "~/components/runs/RunStatuses"; import { redirectBackWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam._index/route.tsx index 5ed4e7ab87..23d6593f42 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam._index/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam._index/route.tsx @@ -11,7 +11,7 @@ import { InitCommandV3, TriggerDevStepV3, TriggerLoginStepV3 } from "~/component import { StepContentContainer } from 
"~/components/StepContentContainer"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; import { InlineCode } from "~/components/code/InlineCode"; -import { EnvironmentLabel } from "~/components/environments/EnvironmentLabel"; +import { EnvironmentLabels } from "~/components/environments/EnvironmentLabel"; import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; import { Button } from "~/components/primitives/Buttons"; import { Callout } from "~/components/primitives/Callout"; @@ -261,15 +261,7 @@ export default function Page() { -
- {task.environments.map((environment) => ( - - ))} -
+
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new.connect-to-slack.ts b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new.connect-to-slack.ts new file mode 100644 index 0000000000..2f02562f21 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new.connect-to-slack.ts @@ -0,0 +1,50 @@ +import { LoaderFunctionArgs, redirect } from "@remix-run/server-runtime"; +import { prisma } from "~/db.server"; +import { + redirectBackWithSuccessMessage, + redirectWithSuccessMessage, +} from "~/models/message.server"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { requireUserId } from "~/services/session.server"; +import { getUserSession } from "~/services/sessionStorage.server"; +import { + ProjectParamSchema, + v3NewProjectAlertPath, + v3NewProjectAlertPathConnectToSlackPath, +} from "~/utils/pathBuilder"; + +export async function loader({ request, params }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const { organizationSlug, projectParam } = ProjectParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + + if (!project) { + throw new Response("Project not found", { status: 404 }); + } + + // Find an integration for Slack for this org + const integration = await prisma.organizationIntegration.findFirst({ + where: { + service: "SLACK", + organizationId: project.organizationId, + }, + }); + + if (integration) { + return redirectWithSuccessMessage( + `${v3NewProjectAlertPath({ slug: organizationSlug }, project)}?option=slack`, + request, + "Successfully connected your Slack workspace" + ); + } else { + // Redirect to Slack + return await OrgIntegrationRepository.redirectToAuthService( + "SLACK", + project.organizationId, + request, + 
v3NewProjectAlertPathConnectToSlackPath({ slug: organizationSlug }, project) + ); + } +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new/route.tsx new file mode 100644 index 0000000000..188f8fd190 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts.new/route.tsx @@ -0,0 +1,456 @@ +import { conform, useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; +import { HashtagIcon, LockClosedIcon } from "@heroicons/react/20/solid"; +import { Form, useActionData, useNavigate, useNavigation } from "@remix-run/react"; +import { LoaderFunctionArgs } from "@remix-run/router"; +import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; +import { SlackIcon } from "@trigger.dev/companyicons"; +import { useEffect, useState } from "react"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { InlineCode } from "~/components/code/InlineCode"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Callout, variantClasses } from "~/components/primitives/Callout"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; +import { Dialog, DialogContent, DialogHeader } from "~/components/primitives/Dialog"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormError } from "~/components/primitives/FormError"; +import { Hint } from "~/components/primitives/Hint"; +import { Input } from "~/components/primitives/Input"; +import { InputGroup } from "~/components/primitives/InputGroup"; +import { Label } from "~/components/primitives/Label"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import SegmentedControl from "~/components/primitives/SegmentedControl"; 
+import { Select, SelectItem } from "~/components/primitives/Select"; +import { InfoIconTooltip } from "~/components/primitives/Tooltip"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { redirectWithSuccessMessage } from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { NewAlertChannelPresenter } from "~/presenters/v3/NewAlertChannelPresenter.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { ProjectParamSchema, v3ProjectAlertsPath } from "~/utils/pathBuilder"; +import { + CreateAlertChannelOptions, + CreateAlertChannelService, +} from "~/v3/services/alerts/createAlertChannel.server"; + +const FormSchema = z + .object({ + alertTypes: z + .array(z.enum(["TASK_RUN_ATTEMPT", "DEPLOYMENT_FAILURE", "DEPLOYMENT_SUCCESS"])) + .min(1) + .or(z.enum(["TASK_RUN_ATTEMPT", "DEPLOYMENT_FAILURE", "DEPLOYMENT_SUCCESS"])), + environmentTypes: z + .array(z.enum(["STAGING", "PRODUCTION"])) + .min(1) + .or(z.enum(["STAGING", "PRODUCTION"])), + type: z.enum(["WEBHOOK", "SLACK", "EMAIL"]).default("EMAIL"), + channelValue: z.string().nonempty(), + integrationId: z.string().optional(), + }) + .refine( + (value) => + value.type === "EMAIL" ? z.string().email().safeParse(value.channelValue).success : true, + { + message: "Must be a valid email address", + path: ["channelValue"], + } + ) + .refine( + (value) => + value.type === "WEBHOOK" ? z.string().url().safeParse(value.channelValue).success : true, + { + message: "Must be a valid URL", + path: ["channelValue"], + } + ) + .refine( + (value) => + value.type === "SLACK" + ? 
typeof value.channelValue === "string" && value.channelValue.startsWith("C") + : true, + { + message: "Must select a Slack channel", + path: ["channelValue"], + } + ); + +function formDataToCreateAlertChannelOptions( + formData: z.infer +): CreateAlertChannelOptions { + switch (formData.type) { + case "WEBHOOK": { + return { + name: `Webhook to ${new URL(formData.channelValue).hostname}`, + alertTypes: Array.isArray(formData.alertTypes) + ? formData.alertTypes + : [formData.alertTypes], + environmentTypes: Array.isArray(formData.environmentTypes) + ? formData.environmentTypes + : [formData.environmentTypes], + channel: { + type: "WEBHOOK", + url: formData.channelValue, + }, + }; + } + case "EMAIL": { + return { + name: `Email to ${formData.channelValue}`, + alertTypes: Array.isArray(formData.alertTypes) + ? formData.alertTypes + : [formData.alertTypes], + environmentTypes: Array.isArray(formData.environmentTypes) + ? formData.environmentTypes + : [formData.environmentTypes], + channel: { + type: "EMAIL", + email: formData.channelValue, + }, + }; + } + case "SLACK": { + const [channelId, channelName] = formData.channelValue.split("/"); + + return { + name: `Slack message to ${channelName}`, + alertTypes: Array.isArray(formData.alertTypes) + ? formData.alertTypes + : [formData.alertTypes], + environmentTypes: Array.isArray(formData.environmentTypes) + ? 
formData.environmentTypes + : [formData.environmentTypes], + channel: { + type: "SLACK", + channelId, + channelName, + integrationId: formData.integrationId, + }, + }; + } + } +} + +export async function loader({ request, params }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const { organizationSlug, projectParam } = ProjectParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + + if (!project) { + throw new Response("Project not found", { status: 404 }); + } + + const presenter = new NewAlertChannelPresenter(); + + const results = await presenter.call(project.id); + + const url = new URL(request.url); + const option = url.searchParams.get("option"); + + return typedjson({ + ...results, + option: option === "slack" ? ("SLACK" as const) : undefined, + }); +} + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const userId = await requireUserId(request); + const { organizationSlug, projectParam } = ProjectParamSchema.parse(params); + + if (request.method.toUpperCase() !== "POST") { + return { status: 405, body: "Method Not Allowed" }; + } + + const formData = await request.formData(); + + const submission = parse(formData, { schema: FormSchema }); + + if (!submission.value) { + return json(submission); + } + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + + if (!project) { + submission.error.key = "Project not found"; + return json(submission); + } + + const service = new CreateAlertChannelService(); + const alertChannel = await service.call( + project.externalRef, + userId, + formDataToCreateAlertChannelOptions(submission.value) + ); + + if (!alertChannel) { + submission.error.key = "Failed to create alert channel"; + return json(submission); + } + + return redirectWithSuccessMessage( + v3ProjectAlertsPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Created ${alertChannel.name} alert` + ); +}; + 
+export default function Page() { + const [isOpen, setIsOpen] = useState(false); + const { slack, option } = useTypedLoaderData(); + const lastSubmission = useActionData(); + const navigation = useNavigation(); + const navigate = useNavigate(); + const organization = useOrganization(); + const project = useProject(); + const [currentAlertChannel, setCurrentAlertChannel] = useState(option ?? "EMAIL"); + + const [selectedSlackChannelValue, setSelectedSlackChannelValue] = useState(); + + const selectedSlackChannel = slack.channels?.find( + (s) => selectedSlackChannelValue === `${s.id}/${s.name}` + ); + + const isLoading = + navigation.state !== "idle" && + navigation.formMethod === "post" && + navigation.formData?.get("action") === "create"; + + const [form, { channelValue: channelValue, alertTypes, environmentTypes, type, integrationId }] = + useForm({ + id: "create-alert", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema: FormSchema }); + }, + shouldRevalidate: "onSubmit", + }); + + useEffect(() => { + setIsOpen(true); + }, []); + + useEffect(() => { + if (navigation.state !== "idle") return; + if (lastSubmission !== undefined) return; + + form.ref.current?.reset(); + }, [navigation.state, lastSubmission]); + + return ( + { + if (!o) { + navigate(v3ProjectAlertsPath(organization, project)); + } + }} + > + + New alert +
+
+ + { + setCurrentAlertChannel(value); + }} + fullWidth + defaultValue={currentAlertChannel ?? undefined} + /> + + + {currentAlertChannel === "EMAIL" ? ( + + + + {channelValue.error} + + ) : currentAlertChannel === "SLACK" ? ( + + {slack.status === "READY" ? ( + <> + + {selectedSlackChannel && selectedSlackChannel.is_private && ( + + To receive alerts in the{" "} + {selectedSlackChannel.name}{" "} + channel, you need to invite the @Trigger.dev Slack Bot. Go to the channel in + Slack and type:{" "} + /invite @Trigger.dev. + + )} + + {channelValue.error} + + + ) : slack.status === "NOT_CONFIGURED" ? ( + + + Connect to Slack + + + ) : ( + + Slack integration is not available. Please contact your organization + administrator. + + )} + + ) : ( + + + + {channelValue.error} + We'll issue POST requests to this URL with a JSON payload. + + )} + + + + +
+ + +
+ + + + + + {alertTypes.error} +
+ + + + + + {environmentTypes.error} + + {form.error} +
+ + {isLoading ? "Saving…" : "Save"} + + } + cancelButton={ + + Cancel + + } + /> +
+
+ +
+
+ ); +} + +function SlackChannelTitle({ name, is_private }: { name?: string; is_private?: boolean }) { + return ( +
+ {is_private ? : } + {name} +
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts/route.tsx new file mode 100644 index 0000000000..fab85a8943 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.alerts/route.tsx @@ -0,0 +1,496 @@ +import { useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; +import { + ArrowUpRightIcon, + BellAlertIcon, + BellSlashIcon, + BookOpenIcon, + EnvelopeIcon, + GlobeAltIcon, + LockClosedIcon, + PlusIcon, + TrashIcon, +} from "@heroicons/react/20/solid"; +import { Form, Outlet, useActionData, useNavigation } from "@remix-run/react"; +import { ActionFunctionArgs, LoaderFunctionArgs, json } from "@remix-run/server-runtime"; +import { SlackIcon } from "@trigger.dev/companyicons"; +import { ProjectAlertChannelType, ProjectAlertType } from "@trigger.dev/database"; +import assertNever from "assert-never"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { EnvironmentTypeLabel } from "~/components/environments/EnvironmentLabel"; +import { PageBody, PageContainer } from "~/components/layout/AppLayout"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { ClipboardField } from "~/components/primitives/ClipboardField"; +import { DetailCell } from "~/components/primitives/DetailCell"; +import { Header2 } from "~/components/primitives/Headers"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { + Table, + TableBody, + TableCell, + TableCellMenu, + TableHeader, + TableHeaderCell, + TableRow, +} from "~/components/primitives/Table"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "~/components/primitives/Tooltip"; +import { EnabledStatus } from 
"~/components/runs/v3/EnabledStatus"; +import { prisma } from "~/db.server"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { redirectWithSuccessMessage } from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { + AlertChannelListPresenter, + AlertChannelListPresenterRecord, +} from "~/presenters/v3/AlertChannelListPresenter.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { + ProjectParamSchema, + docsPath, + v3NewProjectAlertPath, + v3ProjectAlertsPath, +} from "~/utils/pathBuilder"; + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + const { projectParam, organizationSlug } = ProjectParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + + if (!project) { + throw new Response(undefined, { + status: 404, + statusText: "Project not found", + }); + } + + const presenter = new AlertChannelListPresenter(); + const data = await presenter.call(project.id); + + return typedjson(data); +}; + +const schema = z.discriminatedUnion("action", [ + z.object({ action: z.literal("delete"), id: z.string() }), + z.object({ action: z.literal("disable"), id: z.string() }), + z.object({ action: z.literal("enable"), id: z.string() }), +]); + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const userId = await requireUserId(request); + const { organizationSlug, projectParam } = ProjectParamSchema.parse(params); + + if (request.method.toUpperCase() !== "POST") { + return { status: 405, body: "Method Not Allowed" }; + } + + const formData = await request.formData(); + const submission = parse(formData, { schema }); + + if (!submission.value) { + return json(submission); + } + + const project = await findProjectBySlug(organizationSlug, projectParam, 
userId); + + if (!project) { + submission.error.key = "Project not found"; + return json(submission); + } + + switch (submission.value.action) { + case "delete": { + const alertChannel = await prisma.projectAlertChannel.delete({ + where: { id: submission.value.id, projectId: project.id }, + }); + + return redirectWithSuccessMessage( + v3ProjectAlertsPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Deleted ${alertChannel.name} alert` + ); + } + case "disable": { + const alertChannel = await prisma.projectAlertChannel.update({ + where: { id: submission.value.id, projectId: project.id }, + data: { enabled: false }, + }); + + return redirectWithSuccessMessage( + v3ProjectAlertsPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Disabled ${alertChannel.name} alert` + ); + } + case "enable": { + const alertChannel = await prisma.projectAlertChannel.update({ + where: { id: submission.value.id, projectId: project.id }, + data: { enabled: true }, + }); + + return redirectWithSuccessMessage( + v3ProjectAlertsPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Enabled ${alertChannel.name} alert` + ); + } + } +}; + +export default function Page() { + const { alertChannels } = useTypedLoaderData(); + const project = useProject(); + const organization = useOrganization(); + + return ( + + + + + + Alerts docs + + + + +
+ {alertChannels.length > 0 && alertChannels.length < 10 && ( +
+ Project alerts + + New alert + +
+ )} +
+ + + Name + Alert types + Environments + Channel + Enabled + Actions + + + + {alertChannels.length > 0 ? ( + alertChannels.map((alertChannel) => ( + + + {alertChannel.name} + + + {alertChannel.alertTypes.map((type) => alertTypeTitle(type)).join(", ")} + + + {alertChannel.environmentTypes.map((environmentType) => ( + + ))} + + + + + + + + + {alertChannel.enabled ? ( + + ) : ( + + )} + + + + + )) + ) : ( + + +
+ + You haven't created any project alerts yet + + + Get alerted when runs or deployments fail, or when deployments succeed in + both Prod and Staging environments. + + + New alert + +
+
+
+ )} +
+
+
+ Platform alerts + + Subscribe to get email notifications when Trigger.dev creates, updates or resolves a + platform incident. + + + Subscribe + +
+
+ + + + ); +} + +function DeleteAlertChannelButton(props: { id: string }) { + const lastSubmission = useActionData(); + const navigation = useNavigation(); + + const isLoading = + navigation.state !== "idle" && + navigation.formMethod === "post" && + navigation.formData?.get("action") === "delete"; + + const [form, { id }] = useForm({ + id: "delete-alert-channel", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema }); + }, + shouldRevalidate: "onSubmit", + }); + + return ( +
+ + +
+ ); +} + +function DisableAlertChannelButton(props: { id: string }) { + const lastSubmission = useActionData(); + const navigation = useNavigation(); + + const isLoading = + navigation.state !== "idle" && + navigation.formMethod === "post" && + navigation.formData?.get("action") === "delete"; + + const [form, { id }] = useForm({ + id: "disable-alert-channel", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema }); + }, + shouldRevalidate: "onSubmit", + }); + + return ( +
+ + + +
+ ); +} + +function EnableAlertChannelButton(props: { id: string }) { + const lastSubmission = useActionData(); + const navigation = useNavigation(); + + const isLoading = + navigation.state !== "idle" && + navigation.formMethod === "post" && + navigation.formData?.get("action") === "delete"; + + const [form, { id }] = useForm({ + id: "enable-alert-channel", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema }); + }, + shouldRevalidate: "onSubmit", + }); + + return ( +
+ + + +
+ ); +} + +function AlertChannelDetails({ alertChannel }: { alertChannel: AlertChannelListPresenterRecord }) { + switch (alertChannel.properties?.type) { + case "EMAIL": { + return ( + + } + leadingIconClassName="text-charcoal-400" + label={"Email"} + description={alertChannel.properties.email} + /> + ); + } + case "WEBHOOK": { + return ( + + + + + + Webhook + + + } + leadingIconClassName="text-charcoal-400" + label={alertChannel.properties.url} + description={ + } + iconButton + secure={"β€’".repeat(alertChannel.properties.secret.length)} + className="mt-1 w-80" + /> + } + /> + ); + } + case "SLACK": { + return ( + + } + leadingIconClassName="text-charcoal-400" + label={"Slack"} + description={`#${alertChannel.properties.channelName}`} + /> + ); + } + } + + return null; +} + +export function alertTypeTitle(alertType: ProjectAlertType): string { + switch (alertType) { + case "TASK_RUN_ATTEMPT": + return "Task attempt failure"; + case "DEPLOYMENT_FAILURE": + return "Deployment failure"; + case "DEPLOYMENT_SUCCESS": + return "Deployment success"; + default: { + assertNever(alertType); + } + } +} + +export function AlertChannelTypeIcon({ + channelType, + className, +}: { + channelType: ProjectAlertChannelType; + className: string; +}) { + switch (channelType) { + case "EMAIL": + return ; + case "SLACK": + return ; + case "WEBHOOK": + return ; + default: { + assertNever(channelType); + } + } +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.environment-variables.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.environment-variables.new/route.tsx index 418d82e167..016dc204cb 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.environment-variables.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.environment-variables.new/route.tsx @@ -18,7 +18,7 @@ import { environmentTitle, } from 
"~/components/environments/EnvironmentLabel"; import { Button, LinkButton } from "~/components/primitives/Buttons"; -import { Checkbox } from "~/components/primitives/Checkbox"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; import { Dialog, DialogContent, DialogHeader } from "~/components/primitives/Dialog"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormButtons } from "~/components/primitives/FormButtons"; @@ -46,13 +46,12 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { try { const presenter = new EnvironmentVariablesPresenter(); - const { environmentVariables, environments } = await presenter.call({ + const { environments } = await presenter.call({ userId, projectSlug: projectParam, }); return typedjson({ - environmentVariables, environments, }); } catch (error) { @@ -150,7 +149,7 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { export default function Page() { const [isOpen, setIsOpen] = useState(false); - const { environmentVariables, environments } = useTypedLoaderData(); + const { environments } = useTypedLoaderData(); const lastSubmission = useActionData(); const navigation = useNavigation(); const navigate = useNavigate(); @@ -196,7 +195,7 @@ export default function Page() {
{environments.map((environment) => ( - { const userId = await requireUserId(request); const { projectParam, organizationSlug } = ProjectParamSchema.parse(params); const url = new URL(request.url); - const s = Object.fromEntries(url.searchParams.entries()); - const { tasks, versions, statuses, environments, from, to, cursor, direction } = + const s = { + cursor: url.searchParams.get("cursor") ?? undefined, + direction: url.searchParams.get("direction") ?? undefined, + statuses: url.searchParams.getAll("statuses"), + environments: url.searchParams.getAll("environments"), + tasks: url.searchParams.getAll("tasks"), + period: url.searchParams.get("period") ?? undefined, + bulkId: url.searchParams.get("bulkId") ?? undefined, + }; + const { tasks, versions, statuses, environments, period, bulkId, from, to, cursor, direction } = TaskRunListSearchFilters.parse(s); const presenter = new RunListPresenter(); @@ -42,6 +65,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { versions, statuses, environments, + period, + bulkId, from, to, direction: direction, @@ -65,59 +90,246 @@ export default function Page() { - - -
- - Loading runs -
-
- } + + - - {(list) => ( - <> - {list.runs.length === 0 && !list.hasFilters ? ( - list.possibleTasks.length === 0 ? ( - - ) : ( - - ) - ) : ( -
-
-
- -
- -
+ {({ selectedItems }) => ( +
+
+ +
+ + Loading runs
- - -
-
- )} - - )} - - + } + > + + {(list) => ( + <> + {list.runs.length === 0 && !list.hasFilters ? ( + list.possibleTasks.length === 0 ? ( + + ) : ( + + ) + ) : ( +
+
+
+ +
+ +
+
+ + + +
+
+ )} + + )} +
+ +
+ +
+ )} + ); } +function BulkActionBar() { + const { selectedItems, deselectAll } = useSelectedItems(); + const [barState, setBarState] = useState<"none" | "replay" | "cancel">("none"); + + const hasSelectedMaximum = selectedItems.size >= BULK_ACTION_RUN_LIMIT; + + return ( + + {selectedItems.size > 0 && ( + +
+ Bulk actions: + {hasSelectedMaximum ? ( + Maximum of {selectedItems.size} runs selected + ) : ( + {selectedItems.size} runs selected + )} +
+
+ { + if (o) { + setBarState("cancel"); + } else { + setBarState("none"); + } + }} + /> + { + if (o) { + setBarState("replay"); + } else { + setBarState("none"); + } + }} + /> + +
+
+ )} +
+ ); +} + +function CancelRuns({ onOpen }: { onOpen: (open: boolean) => void }) { + const { selectedItems } = useSelectedItems(); + + const organization = useOrganization(); + const project = useProject(); + const failedRedirect = v3RunsPath(organization, project); + + const formAction = `/resources/taskruns/bulk/cancel`; + + const navigation = useNavigation(); + const isLoading = navigation.formAction === formAction; + + return ( + onOpen(o)}> + + + + + Cancel {selectedItems.size} runs? + + Canceling these runs will stop them from running. Only runs that are not already finished + will be canceled, the others will remain in their existing state. + + +
+ + + + {[...selectedItems].map((runId) => ( + + ))} + +
+
+
+
+ ); +} + +function ReplayRuns({ onOpen }: { onOpen: (open: boolean) => void }) { + const { selectedItems } = useSelectedItems(); + + const organization = useOrganization(); + const project = useProject(); + const failedRedirect = v3RunsPath(organization, project); + + const formAction = `/resources/taskruns/bulk/replay`; + + const navigation = useNavigation(); + const isLoading = navigation.formAction === formAction; + + return ( + onOpen(o)}> + + + + + Replay runs? + + Replaying these runs will create a new run for each with the same payload and environment + as the original. It will use the latest version of the code for each task. + + +
+ + + + {[...selectedItems].map((runId) => ( + + ))} + +
+
+
+
+ ); +} + function CreateFirstTaskInstructions() { const organization = useOrganization(); const project = useProject(); diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules.$scheduleParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules.$scheduleParam/route.tsx index 760cbe783b..bc28a1607c 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules.$scheduleParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules.$scheduleParam/route.tsx @@ -8,7 +8,7 @@ import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; import { ExitIcon } from "~/assets/icons/ExitIcon"; import { InlineCode } from "~/components/code/InlineCode"; -import { EnvironmentLabel } from "~/components/environments/EnvironmentLabel"; +import { EnvironmentLabel, EnvironmentLabels } from "~/components/environments/EnvironmentLabel"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { DateTime } from "~/components/primitives/DateTime"; import { @@ -211,16 +211,7 @@ export default function Page() {
-
- {schedule.environments.map((env) => ( - - ))} -
+
{schedule.externalId ? schedule.externalId : "–"} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules/route.tsx index 710a6f0357..6a9a7c50ca 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.schedules/route.tsx @@ -6,7 +6,7 @@ import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { BlankstateInstructions } from "~/components/BlankstateInstructions"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; import { InlineCode } from "~/components/code/InlineCode"; -import { EnvironmentLabel } from "~/components/environments/EnvironmentLabel"; +import { EnvironmentLabel, EnvironmentLabels } from "~/components/environments/EnvironmentLabel"; import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; import { LinkButton } from "~/components/primitives/Buttons"; import { DateTime } from "~/components/primitives/DateTime"; @@ -281,15 +281,7 @@ function SchedulesTable({ {schedule.lastRun ? : "–"} -
- {schedule.environments.map((environment) => ( - - ))} -
+
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.test/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.test/route.tsx index 3db6aaf271..8f96fd3d3d 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.test/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.test/route.tsx @@ -30,11 +30,11 @@ import { } from "~/components/primitives/Table"; import { TaskFunctionName } from "~/components/runs/v3/TaskPath"; import { TaskTriggerSourceIcon } from "~/components/runs/v3/TaskTriggerSource"; +import { useFilterTasks } from "~/hooks/useFilterTasks"; import { useLinkStatus } from "~/hooks/useLinkStatus"; import { useOptimisticLocation } from "~/hooks/useOptimisticLocation"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; -import { useTextFilter } from "~/hooks/useTextFilter"; import { SelectedEnvironment, TaskListItem, @@ -153,36 +153,7 @@ function TaskSelector({ tasks: TaskListItem[]; environmentSlug: string; }) { - const { filterText, setFilterText, filteredItems } = useTextFilter({ - items: tasks, - filter: (task, text) => { - if (task.taskIdentifier.toLowerCase().includes(text.toLowerCase())) { - return true; - } - - if (task.exportName.toLowerCase().includes(text.toLowerCase())) { - return true; - } - - if (task.filePath.toLowerCase().includes(text.toLowerCase())) { - return true; - } - - if (task.id.toLowerCase().includes(text.toLowerCase())) { - return true; - } - - if (task.friendlyId.toLowerCase().includes(text.toLowerCase())) { - return true; - } - - if (task.triggerSource === "SCHEDULED" && "scheduled".includes(text.toLowerCase())) { - return true; - } - - return false; - }, - }); + const { filterText, setFilterText, filteredItems } = useFilterTasks({ tasks }); return (
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx index eb7bad6add..16c0a5c587 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx @@ -16,14 +16,7 @@ import { FormTitle } from "~/components/primitives/FormTitle"; import { Input } from "~/components/primitives/Input"; import { InputGroup } from "~/components/primitives/InputGroup"; import { Label } from "~/components/primitives/Label"; -import { - Select, - SelectContent, - SelectGroup, - SelectItem, - SelectTrigger, - SelectValue, -} from "~/components/primitives/Select"; +import { Select, SelectItem } from "~/components/primitives/Select"; import { prisma } from "~/db.server"; import { useFeatures } from "~/hooks/useFeatures"; import { redirectWithSuccessMessage } from "~/models/message.server"; @@ -148,20 +141,24 @@ export default function NewOrganizationPage() { {canCreateV3Projects ? 
( - - - + {projectVersion.error} ) : ( diff --git a/apps/webapp/app/routes/_app.orgs.new/route.tsx b/apps/webapp/app/routes/_app.orgs.new/route.tsx index 563215901e..eaadf34202 100644 --- a/apps/webapp/app/routes/_app.orgs.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.new/route.tsx @@ -17,14 +17,7 @@ import { Input } from "~/components/primitives/Input"; import { InputGroup } from "~/components/primitives/InputGroup"; import { Label } from "~/components/primitives/Label"; import { RadioGroupItem } from "~/components/primitives/RadioButton"; -import { - Select, - SelectContent, - SelectGroup, - SelectItem, - SelectTrigger, - SelectValue, -} from "~/components/primitives/Select"; +import { Select, SelectItem } from "~/components/primitives/Select"; import { featuresForRequest } from "~/features.server"; import { useFeatures } from "~/hooks/useFeatures"; import { createOrganization } from "~/models/organization.server"; @@ -143,17 +136,24 @@ export default function NewOrganizationPage() { {v3Enabled ? 
( - - - + {projectVersion.error} ) : ( diff --git a/apps/webapp/app/routes/account._index/route.tsx b/apps/webapp/app/routes/account._index/route.tsx index 8a5c97412d..4f31e93575 100644 --- a/apps/webapp/app/routes/account._index/route.tsx +++ b/apps/webapp/app/routes/account._index/route.tsx @@ -6,7 +6,7 @@ import { z } from "zod"; import { UserProfilePhoto } from "~/components/UserProfilePhoto"; import { PageBody, PageContainer } from "~/components/layout/AppLayout"; import { Button } from "~/components/primitives/Buttons"; -import { Checkbox } from "~/components/primitives/Checkbox"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormButtons } from "~/components/primitives/FormButtons"; import { FormError } from "~/components/primitives/FormError"; @@ -158,7 +158,7 @@ export default function Page() { - + ApiAlertChannelPresenter.alertTypeFromApi(type) + ), + channel: { + type: "EMAIL", + email: body.data.channelData.email, + }, + deduplicationKey: body.data.deduplicationKey, + environmentTypes: body.data.environmentTypes, + }); + + return json(await ApiAlertChannelPresenter.alertChannelToApi(alertChannel)); + } + + if (body.data.channel === "webhook") { + if (!body.data.channelData.url) { + return json({ error: "webhook url is required" }, { status: 422 }); + } + + const alertChannel = await service.call(projectRef, authenticationResult.userId, { + name: body.data.name, + alertTypes: body.data.alertTypes.map((type) => + ApiAlertChannelPresenter.alertTypeFromApi(type) + ), + channel: { + type: "WEBHOOK", + url: body.data.channelData.url, + secret: body.data.channelData.secret, + }, + deduplicationKey: body.data.deduplicationKey, + environmentTypes: body.data.environmentTypes, + }); + + return json(await ApiAlertChannelPresenter.alertChannelToApi(alertChannel)); + } + + return json({ error: "Invalid channel type" }, { status: 422 }); + } catch (error) { + if (error 
instanceof ServiceValidationError) { + return json({ error: error.message }, { status: 422 }); + } + + return json( + { error: error instanceof Error ? error.message : "Internal Server Error" }, + { status: 500 } + ); + } +} diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index 2301974a1b..0d481b8262 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -11,9 +11,10 @@ const ParamsSchema = z.object({ }); export const HeadersSchema = z.object({ - "idempotency-key": z.string().optional().nullable(), - "trigger-version": z.string().optional().nullable(), - "x-trigger-span-parent-as-link": z.coerce.number().optional().nullable(), + "idempotency-key": z.string().nullish(), + "trigger-version": z.string().nullish(), + "x-trigger-span-parent-as-link": z.coerce.number().nullish(), + "x-trigger-worker": z.string().nullish(), traceparent: z.string().optional(), tracestate: z.string().optional(), }); @@ -45,6 +46,7 @@ export async function action({ request, params }: ActionFunctionArgs) { "x-trigger-span-parent-as-link": spanParentAsLink, traceparent, tracestate, + "x-trigger-worker": isFromWorker, } = headers.data; const { taskId } = ParamsSchema.parse(params); @@ -58,20 +60,31 @@ export async function action({ request, params }: ActionFunctionArgs) { return json({ error: "Invalid request body" }, { status: 400 }); } - logger.debug("Triggering task", { - taskId, - idempotencyKey, - triggerVersion, - body: body.data, - }); - const service = new TriggerTaskService(); try { + const traceContext = traceparent + ? !triggerVersion // If the trigger version is NOT set, we are in an older version of the SDK + ? { traceparent, tracestate } + : isFromWorker // If the trigger version is set, and the request is from a worker, we should pass the trace context + ? 
{ traceparent, tracestate } + : undefined + : undefined; + + logger.debug("Triggering task", { + taskId, + idempotencyKey, + triggerVersion, + headers: Object.fromEntries(request.headers), + body: body.data, + isFromWorker, + traceContext, + }); + const run = await service.call(taskId, authenticationResult.environment, body.data, { idempotencyKey: idempotencyKey ?? undefined, triggerVersion: triggerVersion ?? undefined, - traceContext: traceparent ? { traceparent, tracestate } : undefined, + traceContext, spanParentAsLink: spanParentAsLink === 1, }); diff --git a/apps/webapp/app/routes/integrations.$serviceName.callback.ts b/apps/webapp/app/routes/integrations.$serviceName.callback.ts new file mode 100644 index 0000000000..8d2160287d --- /dev/null +++ b/apps/webapp/app/routes/integrations.$serviceName.callback.ts @@ -0,0 +1,66 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import z from "zod"; +import { redirectBackWithErrorMessage } from "~/models/message.server"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; +import { requireUserId } from "~/services/session.server"; +import { requestUrl } from "~/utils/requestUrl.server"; +import { CreateOrgIntegrationService } from "~/v3/services/createOrgIntegration.server"; + +const URLSearchSchema = z + .object({ + code: z.string().optional(), + state: z.string().optional(), + error: z.string().optional(), + }) + .passthrough(); + +const ParamsSchema = z.object({ + serviceName: z.string(), +}); + +export async function loader({ request, params }: LoaderFunctionArgs) { + if (request.method.toUpperCase() !== "GET") { + return { status: 405, body: "Method Not Allowed" }; + } + + const userId = await requireUserId(request); + + const url = requestUrl(request); + + const parsedSearchParams = URLSearchSchema.safeParse(Object.fromEntries(url.searchParams)); + + if (!parsedSearchParams.success) { + // TODO: this needs to lookup the redirect url in the cookies + throw new 
Response("Invalid params", { status: 400 }); + } + + if (parsedSearchParams.data.error) { + // TODO: this needs to lookup the redirect url in the cookies + throw new Response(parsedSearchParams.data.error, { status: 400 }); + } + + if (!parsedSearchParams.data.code || !parsedSearchParams.data.state) { + throw new Response("Invalid params", { status: 400 }); + } + + const parsedParams = ParamsSchema.safeParse(params); + + if (!parsedParams.success) { + throw new Response("Invalid params", { status: 400 }); + } + + const service = new CreateOrgIntegrationService(); + + const integration = await service.call( + userId, + parsedSearchParams.data.state, + parsedParams.data.serviceName, + parsedSearchParams.data.code + ); + + if (integration) { + return await OrgIntegrationRepository.redirectAfterAuth(request); + } + + return redirectBackWithErrorMessage(request, "Failed to connect to the service"); +} diff --git a/apps/webapp/app/routes/internal.webhooks.slack.interactivity.ts b/apps/webapp/app/routes/internal.webhooks.slack.interactivity.ts new file mode 100644 index 0000000000..e0706a20fc --- /dev/null +++ b/apps/webapp/app/routes/internal.webhooks.slack.interactivity.ts @@ -0,0 +1,5 @@ +import { ActionFunctionArgs } from "@remix-run/server-runtime"; + +export function action({ request }: ActionFunctionArgs) { + return new Response(null, { status: 200 }); +} diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.schedules.new/route.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.schedules.new/route.tsx index 692a34050c..deb17f3e15 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.schedules.new/route.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.schedules.new/route.tsx @@ -11,7 +11,7 @@ import { environmentTitle, } from "~/components/environments/EnvironmentLabel"; import { Button, LinkButton } from 
"~/components/primitives/Buttons"; -import { Checkbox } from "~/components/primitives/Checkbox"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; import { DateTime } from "~/components/primitives/DateTime"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormError } from "~/components/primitives/FormError"; @@ -21,14 +21,7 @@ import { Input } from "~/components/primitives/Input"; import { InputGroup } from "~/components/primitives/InputGroup"; import { Label } from "~/components/primitives/Label"; import { Paragraph } from "~/components/primitives/Paragraph"; -import { - Select, - SelectContent, - SelectGroup, - SelectItem, - SelectTrigger, - SelectValue, -} from "~/components/primitives/Select"; +import { Select, SelectItem } from "~/components/primitives/Select"; import { Table, TableBody, @@ -189,28 +182,25 @@ export function UpsertScheduleForm({
- - task.toLowerCase().includes(search.toLowerCase())} + dropdownIcon + > + {(matches) => ( + <> + {matches?.map((task) => ( - - {task} - + {task} ))} - - - + + )} + {taskIdentifier.error} {showGenerateField && } @@ -275,7 +265,7 @@ export function UpsertScheduleForm({
{possibleEnvironments.map((environment) => ( - Continue with GitHub - + - - -
- - +
+
+ +
+
+
+ ); +} + +const filterTypes = [ + { + name: "status", + title: "Status", + icon: , + }, + { name: "environment", title: "Environment", icon: }, +]; + +type FilterType = (typeof filterTypes)[number]["name"]; + +//todo what if we had shortcut keys that would deeplink you to the appropriate filter? +function Filter() { + const [filterType, setFilterType] = useState(); + const [searchValue, setSearchValue] = useState(""); + const shortcut = { key: "f" }; + + const clearSearchValue = useCallback(() => { + setSearchValue(""); + }, [setSearchValue]); + + const filterTrigger = ( + + Filter + + ); + + return ( + { + startTransition(() => { + setSearchValue(value); + }); + }} + setOpen={(open) => { + if (!open) { + setFilterType(undefined); + } + }} + > + + + ); +} + +type MenuProps = { + searchValue: string; + clearSearchValue: () => void; + shortcut: ShortcutDefinition; + trigger: React.ReactNode; + filterType: FilterType | undefined; + setFilterType: (filterType: FilterType | undefined) => void; +}; + +function Menu(props: MenuProps) { + switch (props.filterType) { + case undefined: + return ; + case "status": + return ; + case "environment": + return ; + } + return <>; +} + +function MainMenu({ searchValue, clearSearchValue, setFilterType, trigger, shortcut }: MenuProps) { + const filtered = useMemo(() => { + return filterTypes.filter((item) => + item.title.toLowerCase().includes(searchValue.toLowerCase()) + ); + }, [searchValue]); + + return ( + + {trigger} + + + + {filtered.map((type, index) => ( + { + clearSearchValue(); + setFilterType(type.name); + }} + icon={type.icon} + shortcut={shortcutFromIndex(index, { shortcutsEnabled: true })} + > + {type.title} + + ))} + + + + ); +} + +const statuses = allTaskRunStatuses.map((status) => ({ + title: runStatusTitle(status), + value: status, +})); + +function Statuses({ trigger, clearSearchValue, shortcut, searchValue, setFilterType }: MenuProps) { + const { values, replace } = useSearchParams(); + + const handleChange = 
useCallback((values: string[]) => { + clearSearchValue(); + replace({ status: values }); + }, []); + + const filtered = useMemo(() => { + return statuses.filter((item) => item.title.toLowerCase().includes(searchValue.toLowerCase())); + }, [searchValue]); + + return ( + + {trigger} + { + setFilterType(undefined); + return false; + }} + > + + + {filtered.map((item, index) => ( + + + + ))} + + + + ); +} + +const environments = [ + { + type: "DEVELOPMENT" as const, + }, + { + type: "STAGING" as const, + }, + { + type: "PRODUCTION" as const, + }, +]; + +function Environments({ + trigger, + clearSearchValue, + shortcut, + searchValue, + setFilterType, +}: MenuProps) { + const { values, replace } = useSearchParams(); + + const handleChange = useCallback((values: string[]) => { + clearSearchValue(); + replace({ environment: values }); + }, []); + + const filtered = useMemo(() => { + return environments.filter((item) => + item.type.toLowerCase().includes(searchValue.toLowerCase()) + ); + }, [searchValue]); + + return ( + + {trigger} + { + setFilterType(undefined); + return false; + }} + > + + + {filtered.map((item, index) => ( + + + + ))} + + + + ); +} diff --git a/apps/webapp/app/routes/storybook.segmented-control/route.tsx b/apps/webapp/app/routes/storybook.segmented-control/route.tsx index e331a752c2..f97d394039 100644 --- a/apps/webapp/app/routes/storybook.segmented-control/route.tsx +++ b/apps/webapp/app/routes/storybook.segmented-control/route.tsx @@ -1,4 +1,5 @@ import { MainCenteredContainer } from "~/components/layout/AppLayout"; +import { Paragraph } from "~/components/primitives/Paragraph"; import SegmentedControl from "~/components/primitives/SegmentedControl"; const options = [ @@ -8,8 +9,15 @@ const options = [ export default function Story() { return ( - - + +
+ Primary + +
+
+ Secondary + +
); } diff --git a/apps/webapp/app/routes/storybook.select/route.tsx b/apps/webapp/app/routes/storybook.select/route.tsx index f9fc2806a8..b91704c143 100644 --- a/apps/webapp/app/routes/storybook.select/route.tsx +++ b/apps/webapp/app/routes/storybook.select/route.tsx @@ -1,91 +1,272 @@ -import { Header1, Header2 } from "~/components/primitives/Headers"; +import { CircleStackIcon } from "@heroicons/react/20/solid"; +import { Form, useNavigate } from "@remix-run/react"; +import { useCallback, useState } from "react"; +import { LogoIcon } from "~/components/LogoIcon"; +import { Button } from "~/components/primitives/Buttons"; import { Select, - SelectContent, SelectGroup, + SelectGroupLabel, SelectItem, - SelectLabel, - SelectSeparator, - SelectTrigger, - SelectValue, + SelectLinkItem, + shortcutFromIndex, } from "~/components/primitives/Select"; +import { + TaskRunStatusCombo, + allTaskRunStatuses, + runStatusTitle, +} from "~/components/runs/v3/TaskRunStatus"; +import { useOptimisticLocation } from "~/hooks/useOptimisticLocation"; + +const branches = [ + "main", + "0.10-stable", + "0.11-stable", + "0.12-stable", + "0.13-stable", + "0.14-stable", + "15-stable", + "15.6-dev", + "16.3-dev", + "16.4.2-dev", + "16.8.3", + "16.8.4", + "16.8.5", + "16.8.6", + "17.0.0-dev", + "builds/facebook-www", + "devtools-v4-merge", + "fabric-cleanup", + "fabric-focus-blur", + "gh-pages", + "leg", + "nativefb-enable-cache", + "nov-main-trigger", + "rsckeys", +]; export default function Story() { return ( -
-
- Variants - size=small width=content - - + + Item 1 + + + Item 2 + - - size=small width=full - - + + Item 1 + + + Item 2 + - - size=medium width=content - - + + Item 1 + + + Item 2 + - - size=medium width=full - - + + Item 1 + + + Item 2 + - -
+ + + + + + +
+
); } + +const statuses = allTaskRunStatuses.map((status) => ({ + title: runStatusTitle(status), + value: status, +})); + +function Statuses() { + const navigate = useNavigate(); + const location = useOptimisticLocation(); + const search = new URLSearchParams(location.search); + + const handleChange = useCallback((values: string[]) => { + search.delete("status"); + for (const value of values) { + search.append("status", value); + } + navigate(`${location.pathname}?${search.toString()}`, { replace: true }); + }, []); + + return ( + + ); +} + +export const projects = [ + { + type: "section" as const, + title: "Apple", + items: [ + { + title: "iTunes", + value: "itunes", + }, + { + title: "App Store", + value: "appstore", + }, + ], + }, + { + type: "section" as const, + title: "Google", + items: [ + { + title: "Maps", + value: "maps", + }, + { + title: "Gmail", + value: "gmail", + }, + { + title: "Waymo", + value: "waymo", + }, + { + title: "Android", + value: "android", + }, + ], + }, + { + type: "section" as const, + title: "Uber", + items: [ + { + title: "Planner", + value: "planner", + }, + ], + }, +]; + +function ProjectSelector() { + const location = useOptimisticLocation(); + const search = new URLSearchParams(location.search); + + const selected = projects + .find((p) => p.items.some((i) => i.value === search.get("project"))) + ?.items.find((i) => i.value === search.get("project")); + + const searchParams = new URLSearchParams(location.search); + searchParams.delete("project"); + + return ( + + ); +} diff --git a/apps/webapp/app/routes/storybook/route.tsx b/apps/webapp/app/routes/storybook/route.tsx index f8ed181ab6..ad8e61b0b4 100644 --- a/apps/webapp/app/routes/storybook/route.tsx +++ b/apps/webapp/app/routes/storybook/route.tsx @@ -134,6 +134,10 @@ const stories: Story[] = [ name: "Select", slug: "select", }, + { + name: "Filter", + slug: "filter", + }, { name: "Popover", slug: "popover", diff --git a/apps/webapp/app/services/apiRateLimit.server.ts 
b/apps/webapp/app/services/apiRateLimit.server.ts index 408adef63c..29d9f81c72 100644 --- a/apps/webapp/app/services/apiRateLimit.server.ts +++ b/apps/webapp/app/services/apiRateLimit.server.ts @@ -191,6 +191,7 @@ export const apiRateLimiter = authorizationRateLimitMiddleware({ pathMatchers: [/^\/api/], // Allow /api/v1/tasks/:id/callback/:secret pathWhiteList: [ + "/api/internal/stripe_webhooks", "/api/v1/authorization-code", "/api/v1/token", /^\/api\/v1\/tasks\/[^\/]+\/callback\/[^\/]+$/, // /api/v1/tasks/$id/callback/$secret diff --git a/apps/webapp/app/services/db/graphileMigrationHelper.server.ts b/apps/webapp/app/services/db/graphileMigrationHelper.server.ts new file mode 100644 index 0000000000..0fe54df864 --- /dev/null +++ b/apps/webapp/app/services/db/graphileMigrationHelper.server.ts @@ -0,0 +1,95 @@ +import { runMigrations } from "graphile-worker"; +import { PrismaClient, prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { logger } from "~/services/logger.server"; +import { PgNotifyService } from "./pgNotify.server"; +import { z } from "zod"; + +export class GraphileMigrationHelperService { + #prismaClient: PrismaClient; + + constructor(prismaClient: PrismaClient = prisma) { + this.#prismaClient = prismaClient; + } + + public async call() { + this.#logDebug("GraphileMigrationHelperService.call"); + + await this.#detectAndPrepareForMigrations(); + + await runMigrations({ + connectionString: env.DATABASE_URL, + schema: env.WORKER_SCHEMA, + }); + } + + #logDebug(message: string, args?: any) { + logger.debug(`[migrationHelper] ${message}`, args); + } + + async #getLatestMigration() { + const migrationQueryResult = await this.#prismaClient.$queryRawUnsafe(` + SELECT id FROM ${env.WORKER_SCHEMA}.migrations + ORDER BY id DESC LIMIT 1 + `); + + const MigrationQueryResultSchema = z.array(z.object({ id: z.number() })); + + const migrationResults = MigrationQueryResultSchema.parse(migrationQueryResult); + + if (!migrationResults.length) { + 
// no migrations applied yet + return -1; + } + + return migrationResults[0].id; + } + + async #graphileSchemaExists() { + const schemaCount = await this.#prismaClient.$executeRaw` + SELECT schema_name FROM information_schema.schemata + WHERE schema_name = ${env.WORKER_SCHEMA} + `; + + return schemaCount === 1; + } + + /** Helper for graphile-worker v0.14.0 migration. No-op if already migrated. */ + async #detectAndPrepareForMigrations() { + if (!(await this.#graphileSchemaExists())) { + // no schema yet, likely first start + return; + } + + const latestMigration = await this.#getLatestMigration(); + + if (latestMigration < 0) { + // no migrations found + return; + } + + // the first v0.14.0 migration has ID 11 + if (latestMigration > 10) { + // already migrated + return; + } + + // add 15s to graceful shutdown timeout, just to be safe + const migrationDelayInMs = env.GRACEFUL_SHUTDOWN_TIMEOUT + 15000; + + this.#logDebug("Delaying worker startup due to pending migration", { + latestMigration, + migrationDelayInMs, + }); + + console.log(`⚠️ detected pending graphile migration`); + console.log(`⚠️ notifying running workers`); + + const pgNotify = new PgNotifyService(); + await pgNotify.call("trigger:graphile:migrate", { latestMigration }); + + console.log(`⚠️ delaying worker startup by ${migrationDelayInMs}ms`); + + await new Promise((resolve) => setTimeout(resolve, migrationDelayInMs)); + } +} diff --git a/apps/webapp/app/services/email.server.ts b/apps/webapp/app/services/email.server.ts index 5e37a085f3..46f971505a 100644 --- a/apps/webapp/app/services/email.server.ts +++ b/apps/webapp/app/services/email.server.ts @@ -20,6 +20,17 @@ const client = singleton( }) ); +const alertsClient = singleton( + "alerts-email-client", + () => + new EmailClient({ + apikey: env.ALERT_RESEND_API_KEY, + imagesBaseUrl: env.APP_ORIGIN, + from: env.ALERT_FROM_EMAIL ?? "noreply@alerts.trigger.dev", + replyTo: env.REPLY_TO_EMAIL ?? 
"help@email.trigger.dev", + }) +); + export async function sendMagicLinkEmail(options: SendEmailOptions): Promise { // Auto redirect when in development mode if (env.NODE_ENV === "development") { @@ -67,3 +78,7 @@ export async function scheduleEmail(data: DeliverEmail, delay?: { seconds: numbe export async function sendEmail(data: DeliverEmail) { return client.send(data); } + +export async function sendAlertEmail(data: DeliverEmail) { + return alertsClient.send(data); +} diff --git a/apps/webapp/app/services/endpoints/createEndpoint.server.ts b/apps/webapp/app/services/endpoints/createEndpoint.server.ts index a77bb844f5..76a4418faa 100644 --- a/apps/webapp/app/services/endpoints/createEndpoint.server.ts +++ b/apps/webapp/app/services/endpoints/createEndpoint.server.ts @@ -4,7 +4,7 @@ import { AuthenticatedEnvironment } from "../apiAuth.server"; import { EndpointApi } from "../endpointApi.server"; import { workerQueue } from "../worker.server"; import { env } from "~/env.server"; -import { RuntimeEnvironmentType } from "@trigger.dev/database"; +import { RuntimeEnvironmentType } from "~/database-types"; const indexingHookIdentifier = customAlphabet("0123456789abcdefghijklmnopqrstuvxyz", 10); diff --git a/apps/webapp/app/services/endpoints/performEndpointIndexService.ts b/apps/webapp/app/services/endpoints/performEndpointIndexService.ts index baa0445c31..db192ef4c2 100644 --- a/apps/webapp/app/services/endpoints/performEndpointIndexService.ts +++ b/apps/webapp/app/services/endpoints/performEndpointIndexService.ts @@ -14,6 +14,9 @@ import { IndexEndpointStats } from "@trigger.dev/core"; import { RegisterHttpEndpointService } from "../triggers/registerHttpEndpoint.server"; import { RegisterWebhookService } from "../triggers/registerWebhook.server"; import { EndpointIndex } from "@trigger.dev/database"; +import { env } from "~/env.server"; + +const MAX_SEQUENTIAL_FAILURE_COUNT = env.MAX_SEQUENTIAL_INDEX_FAILURE_COUNT; export class PerformEndpointIndexService { 
#prismaClient: PrismaClient; @@ -56,9 +59,16 @@ export class PerformEndpointIndexService { if (!endpointIndex.endpoint.url) { logger.debug("Endpoint URL is not set", endpointIndex); - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: "Endpoint URL is not set", - }); + + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: "Endpoint URL is not set", + }, + false + ); } // Make a request to the endpoint to fetch a list of jobs @@ -69,9 +79,15 @@ export class PerformEndpointIndexService { const { response, parser, headerParser, errorParser } = await client.indexEndpoint(); if (!response) { - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: `Could not connect to endpoint ${endpointIndex.endpoint.url}`, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: `Could not connect to endpoint ${endpointIndex.endpoint.url}`, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } if (isRedirect(response.status)) { @@ -83,15 +99,27 @@ export class PerformEndpointIndexService { const location = response.headers.get("location"); if (!location) { - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: `Endpoint ${endpointIndex.endpoint.url} is redirecting but no location header is present`, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: `Endpoint ${endpointIndex.endpoint.url} is redirecting but no location header is present`, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } if (redirectCount > 5) { - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: `Endpoint ${endpointIndex.endpoint.url} is redirecting too many times`, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: `Endpoint 
${endpointIndex.endpoint.url} is redirecting too many times`, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } await this.#prismaClient.endpoint.update({ @@ -111,20 +139,38 @@ export class PerformEndpointIndexService { const body = await safeBodyFromResponse(response, errorParser); if (body) { - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: body.message, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: body.message, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: "Trigger API key is invalid", - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: "Trigger API key is invalid", + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } if (!response.ok) { - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: `Could not connect to endpoint ${endpointIndex.endpoint.url}. Status code: ${response.status}`, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: `Could not connect to endpoint ${endpointIndex.endpoint.url}. 
Status code: ${response.status}`, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } const anyBody = await response.json(); @@ -152,10 +198,16 @@ export class PerformEndpointIndexService { }).message; } - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: friendlyError, - raw: fromZodError(bodyResult.error).message, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: friendlyError, + raw: fromZodError(bodyResult.error).message, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } const headerResult = headerParser.safeParse(Object.fromEntries(response.headers.entries())); @@ -163,10 +215,16 @@ export class PerformEndpointIndexService { const friendlyError = fromZodError(headerResult.error, { prefix: "Your headers are invalid", }); - return updateEndpointIndexWithError(this.#prismaClient, id, { - message: friendlyError.message, - raw: headerResult.error.issues, - }); + return updateEndpointIndexWithError( + this.#prismaClient, + id, + endpointIndex.endpoint.id, + { + message: friendlyError.message, + raw: headerResult.error.issues, + }, + endpointIndex.endpoint.environment.type !== "DEVELOPMENT" + ); } const { jobs, sources, dynamicTriggers, dynamicSchedules, httpEndpoints, webhooks } = @@ -407,8 +465,49 @@ export class PerformEndpointIndexService { async function updateEndpointIndexWithError( prismaClient: PrismaClient, id: string, - error: EndpointIndexError + endpointId: string, + error: EndpointIndexError, + checkDisabling = true ) { + // Check here to see if this endpoint has only failed for the last 50 times + // And if so, we disable the endpoint by setting the url to null + if (checkDisabling) { + const recentIndexes = await prismaClient.endpointIndex.findMany({ + where: { + endpointId, + id: { + not: id, + }, + }, + orderBy: { + createdAt: "desc", + }, + take: MAX_SEQUENTIAL_FAILURE_COUNT - 1, + select: { + status: true, + }, + }); + 
+ if ( + recentIndexes.length === MAX_SEQUENTIAL_FAILURE_COUNT - 1 && + recentIndexes.every((index) => index.status === "FAILURE") + ) { + logger.debug("Disabling endpoint", { + endpointId, + error, + }); + + await prismaClient.endpoint.update({ + where: { + id: endpointId, + }, + data: { + url: null, + }, + }); + } + } + return await prismaClient.endpointIndex.update({ where: { id, diff --git a/apps/webapp/app/services/endpoints/validateCreateEndpoint.server.ts b/apps/webapp/app/services/endpoints/validateCreateEndpoint.server.ts index 84baa4c222..f5d2053121 100644 --- a/apps/webapp/app/services/endpoints/validateCreateEndpoint.server.ts +++ b/apps/webapp/app/services/endpoints/validateCreateEndpoint.server.ts @@ -5,7 +5,7 @@ import { AuthenticatedEnvironment } from "../apiAuth.server"; import { workerQueue } from "../worker.server"; import { CreateEndpointError } from "./createEndpoint.server"; import { EndpointApi } from "../endpointApi.server"; -import { RuntimeEnvironmentType } from "@trigger.dev/database"; +import { RuntimeEnvironmentType } from "~/database-types"; const indexingHookIdentifier = customAlphabet("0123456789abcdefghijklmnopqrstuvxyz", 10); diff --git a/apps/webapp/app/services/events/cancelRunsForEvent.server.ts b/apps/webapp/app/services/events/cancelRunsForEvent.server.ts index dca8b32282..a392c3e8f8 100644 --- a/apps/webapp/app/services/events/cancelRunsForEvent.server.ts +++ b/apps/webapp/app/services/events/cancelRunsForEvent.server.ts @@ -1,11 +1,14 @@ -import { $transaction, PrismaClient, prisma } from "~/db.server"; -import { AuthenticatedEnvironment } from "../apiAuth.server"; -import { JobRunStatus } from "@trigger.dev/database"; +import type { PrismaClient } from "~/db.server"; +import { $transaction, prisma } from "~/db.server"; +import type { AuthenticatedEnvironment } from "../apiAuth.server"; import { CancelRunService } from "../runs/cancelRun.server"; import { logger } from "../logger.server"; -import { CancelRunsForEvent } from 
"@trigger.dev/core"; +import type { CancelRunsForEvent } from "@trigger.dev/core"; +import type { JobRunStatus as JobRunStatusType } from "@trigger.dev/database"; +import { JobRunStatus } from "~/database-types"; -const CANCELLABLE_JOB_RUN_STATUS: JobRunStatus[] = [ + +const CANCELLABLE_JOB_RUN_STATUS: Array = [ JobRunStatus.PENDING, JobRunStatus.QUEUED, JobRunStatus.WAITING_ON_CONNECTIONS, diff --git a/apps/webapp/app/services/events/ingestSendEvent.server.ts b/apps/webapp/app/services/events/ingestSendEvent.server.ts index e0eccc3d90..034fad9718 100644 --- a/apps/webapp/app/services/events/ingestSendEvent.server.ts +++ b/apps/webapp/app/services/events/ingestSendEvent.server.ts @@ -60,6 +60,11 @@ export class IngestSendEvent { try { const deliverAt = this.#calculateDeliverAt(options); + if (!environment.organization.runsEnabled) { + logger.debug("IngestSendEvent: Runs are disabled for this organization", environment); + return; + } + return await $transaction(this.#prismaClient, async (tx) => { const externalAccount = options?.accountId ? 
await tx.externalAccount.upsert({ diff --git a/apps/webapp/app/services/jobs/cancelRunsForJob.server.ts b/apps/webapp/app/services/jobs/cancelRunsForJob.server.ts index ff6e112d74..0ad57ac461 100644 --- a/apps/webapp/app/services/jobs/cancelRunsForJob.server.ts +++ b/apps/webapp/app/services/jobs/cancelRunsForJob.server.ts @@ -1,11 +1,11 @@ import { $transaction, PrismaClient, prisma } from "~/db.server"; import { AuthenticatedEnvironment } from "../apiAuth.server"; -import { JobRunStatus } from "@trigger.dev/database"; import { CancelRunService } from "../runs/cancelRun.server"; import { logger } from "../logger.server"; import { CancelRunsForJob } from "@trigger.dev/core"; +import { JobRunStatus } from "~/database-types"; -const CANCELLABLE_JOB_RUN_STATUS: JobRunStatus[] = [ +const CANCELLABLE_JOB_RUN_STATUS: Array = [ JobRunStatus.PENDING, JobRunStatus.QUEUED, JobRunStatus.WAITING_ON_CONNECTIONS, diff --git a/apps/webapp/app/services/runs/startRun.server.ts b/apps/webapp/app/services/runs/startRun.server.ts index 62f9f0ed10..6dfb0a5ba6 100644 --- a/apps/webapp/app/services/runs/startRun.server.ts +++ b/apps/webapp/app/services/runs/startRun.server.ts @@ -8,6 +8,7 @@ import { $transaction, prisma } from "~/db.server"; import { workerQueue } from "../worker.server"; import { ResumeRunService } from "./resumeRun.server"; import { createHash } from "node:crypto"; +import { logger } from "../logger.server"; type FoundRun = NonNullable>>; type RunConnectionsByKey = Awaited>; @@ -36,6 +37,13 @@ export class StartRunService { } #runIsStartable(run: FoundRun) { + if (!run.organization.runsEnabled) { + logger.debug("StartRunService: Runs are disabled for this organization", { + organizationId: run.organization.id, + }); + return false; + } + const startableStatuses = ["PENDING", "WAITING_ON_CONNECTIONS"] as const; return startableStatuses.includes(run.status); } @@ -64,8 +72,6 @@ export class StartRunService { await $transaction( this.#prismaClient, async (tx) => { - 
await tx.$executeRaw`SELECT pg_advisory_xact_lock(${lockId})`; - const counter = await tx.jobCounter.upsert({ where: { jobId: run.jobId }, update: { lastNumber: { increment: 1 } }, @@ -146,6 +152,7 @@ async function findRun(tx: PrismaClientOrTransaction, id: string) { include: { queue: true, environment: true, + organization: true, version: { include: { integrations: { diff --git a/apps/webapp/app/services/schedules/deliverScheduledEvent.server.ts b/apps/webapp/app/services/schedules/deliverScheduledEvent.server.ts index 3dff3a9d91..e9e6fa11d1 100644 --- a/apps/webapp/app/services/schedules/deliverScheduledEvent.server.ts +++ b/apps/webapp/app/services/schedules/deliverScheduledEvent.server.ts @@ -4,6 +4,9 @@ import { NextScheduledEventService } from "./nextScheduledEvent.server"; import { IngestSendEvent } from "../events/ingestSendEvent.server"; import { InvokeDispatcherService } from "../events/invokeDispatcher.server"; import { logger } from "../logger.server"; +import { workerQueue } from "../worker.server"; + +const DEFERRED_ENQUEUE_THRESHOLD_IN_SECONDS = 60 * 60 * 2 - 1; // 2 hours - 1 second export class DeliverScheduledEventService { #prismaClient: PrismaClientOrTransaction; @@ -12,6 +15,132 @@ export class DeliverScheduledEventService { this.#prismaClient = prismaClient; } + // This runs every 10 minutes + public static async scheduleImminentDeferredEvents() { + // Find all deferred events that are due to be enqueued in the next hour + const deferredEvents = await prisma.deferredScheduledEventService.findMany({ + where: { + runAt: { + lte: new Date(Date.now() + (DEFERRED_ENQUEUE_THRESHOLD_IN_SECONDS / 2) * 1000), + }, + }, + }); + + for (const deferredEvent of deferredEvents) { + logger.debug("Enqueuing deferred scheduled event", { + scheduleSourceId: deferredEvent.scheduleSourceId, + runAt: deferredEvent.runAt, + }); + + try { + await DeliverScheduledEventService.enqueue( + deferredEvent.scheduleSourceId, + deferredEvent.runAt, + { + ts: 
deferredEvent.runAt, + lastTimestamp: deferredEvent.lastTimestamp ?? undefined, + } + ); + } catch (error) { + logger.error("Error enqueuing deferred scheduled event", { + scheduleSourceId: deferredEvent.scheduleSourceId, + runAt: deferredEvent.runAt, + error, + }); + } + } + } + + public static async dequeue(id: string, tx: PrismaClientOrTransaction = prisma) { + await tx.deferredScheduledEventService.deleteMany({ + where: { + scheduleSourceId: id, + }, + }); + + await workerQueue.dequeue(`scheduled:${id}`); + + await tx.scheduleSource.update({ + where: { + id, + }, + data: { + workerJobId: null, + }, + }); + } + + public static async enqueue( + id: string, + runAt: Date, + payload: ScheduledPayload, + tx: PrismaClientOrTransaction = prisma + ) { + if (runAt.getTime() - Date.now() > DEFERRED_ENQUEUE_THRESHOLD_IN_SECONDS * 1000) { + logger.debug("Deferring enqueueing events.deliverScheduled", { + id, + runAt, + payload, + }); + + await tx.deferredScheduledEventService.upsert({ + where: { + scheduleSourceId: id, + }, + create: { + scheduleSourceId: id, + runAt, + lastTimestamp: payload.lastTimestamp, + }, + update: { + runAt, + lastTimestamp: payload.lastTimestamp, + }, + }); + + await tx.scheduleSource.update({ + where: { + id, + }, + data: { + workerJobId: null, + nextEventTimestamp: runAt, + }, + }); + + await workerQueue.dequeue(`scheduled:${id}`); + } else { + await tx.deferredScheduledEventService.deleteMany({ + where: { + scheduleSourceId: id, + }, + }); + + const workerJob = await workerQueue.enqueue( + "events.deliverScheduled", + { + id: id, + payload, + }, + { + runAt, + tx, + jobKey: `scheduled:${id}`, + } + ); + + await tx.scheduleSource.update({ + where: { + id, + }, + data: { + workerJobId: workerJob.id, + nextEventTimestamp: runAt, + }, + }); + } + } + public async call(id: string, payload: ScheduledPayload) { return await $transaction( this.#prismaClient, diff --git a/apps/webapp/app/services/schedules/disableScheduleSource.server.ts 
b/apps/webapp/app/services/schedules/disableScheduleSource.server.ts index b8f00a5283..b53d29719f 100644 --- a/apps/webapp/app/services/schedules/disableScheduleSource.server.ts +++ b/apps/webapp/app/services/schedules/disableScheduleSource.server.ts @@ -1,6 +1,7 @@ import type { EventDispatcher } from "@trigger.dev/database"; import { $transaction, PrismaClientOrTransaction, prisma } from "~/db.server"; import { workerQueue } from "../worker.server"; +import { DeliverScheduledEventService } from "./deliverScheduledEvent.server"; export class DisableScheduleSourceService { #prismaClient: PrismaClientOrTransaction; @@ -36,7 +37,7 @@ export class DisableScheduleSourceService { }, }); - await workerQueue.dequeue(`scheduled:${scheduleSource.id}`, { tx }); + await DeliverScheduledEventService.dequeue(scheduleSource.id, tx); return scheduleSource; }); diff --git a/apps/webapp/app/services/schedules/nextScheduledEvent.server.ts b/apps/webapp/app/services/schedules/nextScheduledEvent.server.ts index 7622276246..fb9ed1cf9f 100644 --- a/apps/webapp/app/services/schedules/nextScheduledEvent.server.ts +++ b/apps/webapp/app/services/schedules/nextScheduledEvent.server.ts @@ -6,8 +6,8 @@ import { } from "@trigger.dev/core"; import { $transaction, PrismaClientOrTransaction, prisma } from "~/db.server"; import { parseExpression } from "cron-parser"; -import { workerQueue } from "../worker.server"; import { logger } from "../logger.server"; +import { DeliverScheduledEventService } from "./deliverScheduledEvent.server"; export class NextScheduledEventService { #prismaClient: PrismaClientOrTransaction; @@ -45,32 +45,16 @@ export class NextScheduledEventService { lastTimestamp: scheduleSource.lastEventTimestamp, }); - const workerJob = await workerQueue.enqueue( - "events.deliverScheduled", + await DeliverScheduledEventService.enqueue( + scheduleSource.id, + scheduleTime, { - id: scheduleSource.id, - payload: { - ts: scheduleTime, - lastTimestamp: scheduleSource.lastEventTimestamp ?? 
undefined, - }, + ts: scheduleTime, + lastTimestamp: scheduleSource.lastEventTimestamp ?? undefined, }, - { - runAt: scheduleTime, - tx, - jobKey: `scheduled:${scheduleSource.id}`, - } + tx ); - await this.#prismaClient.scheduleSource.update({ - where: { - id: scheduleSource.id, - }, - data: { - workerJobId: workerJob.id, - nextEventTimestamp: scheduleTime, - }, - }); - return scheduleSource; }); } diff --git a/apps/webapp/app/services/secrets/secretStore.server.ts b/apps/webapp/app/services/secrets/secretStore.server.ts index 45673d7f06..43ebf32a72 100644 --- a/apps/webapp/app/services/secrets/secretStore.server.ts +++ b/apps/webapp/app/services/secrets/secretStore.server.ts @@ -55,12 +55,14 @@ export class SecretStore { } } -const EncryptedSecretValueSchema = z.object({ +export const EncryptedSecretValueSchema = z.object({ nonce: z.string(), ciphertext: z.string(), tag: z.string(), }); +export type EncryptedSecretValue = z.infer; + /** This stores secrets in the Postgres Database, encrypted using aes-256-gcm */ class PrismaSecretStore implements SecretStoreProvider { #prismaClient: PrismaClientOrTransaction; @@ -181,18 +183,11 @@ class PrismaSecretStore implements SecretStoreProvider { } async #decrypt(nonce: string, ciphertext: string, tag: string): Promise { - const decipher = nodeCrypto.createDecipheriv( - "aes-256-gcm", - this.encryptionKey, - Buffer.from(nonce, "hex") - ); - - decipher.setAuthTag(Buffer.from(tag, "hex")); - - let decrypted = decipher.update(ciphertext, "hex", "utf8"); - decrypted += decipher.final("utf8"); - - return decrypted; + return await decryptSecret(this.encryptionKey, { + nonce, + ciphertext, + tag, + }); } async #encrypt(value: string): Promise<{ @@ -200,19 +195,7 @@ class PrismaSecretStore implements SecretStoreProvider { ciphertext: string; tag: string; }> { - const nonce = nodeCrypto.randomBytes(12); - const cipher = nodeCrypto.createCipheriv("aes-256-gcm", this.encryptionKey, nonce); - - let encrypted = cipher.update(value, 
"utf8", "hex"); - encrypted += cipher.final("hex"); - - const tag = cipher.getAuthTag().toString("hex"); - - return { - nonce: nonce.toString("hex"), - ciphertext: encrypted, - tag, - }; + return await encryptSecret(this.encryptionKey, value); } } @@ -234,3 +217,40 @@ export function getSecretStore< } } } + +export async function decryptSecret( + encryptionKey: string, + secret: EncryptedSecretValue +): Promise { + const decipher = nodeCrypto.createDecipheriv( + "aes-256-gcm", + encryptionKey, + Buffer.from(secret.nonce, "hex") + ); + + decipher.setAuthTag(Buffer.from(secret.tag, "hex")); + + let decrypted = decipher.update(secret.ciphertext, "hex", "utf8"); + decrypted += decipher.final("utf8"); + + return decrypted; +} + +export async function encryptSecret( + encryptionKey: string, + value: string +): Promise { + const nonce = nodeCrypto.randomBytes(12); + const cipher = nodeCrypto.createCipheriv("aes-256-gcm", encryptionKey, nonce); + + let encrypted = cipher.update(value, "utf8", "hex"); + encrypted += cipher.final("hex"); + + const tag = cipher.getAuthTag().toString("hex"); + + return { + nonce: nonce.toString("hex"), + ciphertext: encrypted, + tag, + }; +} diff --git a/apps/webapp/app/services/sources/handleHttpSource.server.ts b/apps/webapp/app/services/sources/handleHttpSource.server.ts index e68c845d44..ed96c90b2a 100644 --- a/apps/webapp/app/services/sources/handleHttpSource.server.ts +++ b/apps/webapp/app/services/sources/handleHttpSource.server.ts @@ -1,9 +1,8 @@ import type { PrismaClient } from "~/db.server"; import { prisma } from "~/db.server"; import { workerQueue } from "../worker.server"; -import { requestUrl } from "~/utils/requestUrl.server"; -import { RuntimeEnvironmentType } from "@trigger.dev/database"; import { createHttpSourceRequest } from "~/utils/createHttpSourceRequest"; +import { RuntimeEnvironmentType } from "~/database-types"; export class HandleHttpSourceService { #prismaClient: PrismaClient; @@ -19,6 +18,7 @@ export class 
HandleHttpSourceService { endpoint: true, environment: true, secretReference: true, + organization: true, }, }); @@ -30,6 +30,14 @@ export class HandleHttpSourceService { return { status: 200 }; } + if (!triggerSource.endpoint.url) { + return { status: 404 }; + } + + if (!triggerSource.organization.runsEnabled) { + return { status: 404 }; + } + if (!triggerSource.interactive) { const sourceRequest = await createHttpSourceRequest(request); diff --git a/apps/webapp/app/services/sources/handleWebhookRequest.server.ts b/apps/webapp/app/services/sources/handleWebhookRequest.server.ts index d8265864e5..166d8025e1 100644 --- a/apps/webapp/app/services/sources/handleWebhookRequest.server.ts +++ b/apps/webapp/app/services/sources/handleWebhookRequest.server.ts @@ -1,10 +1,10 @@ import type { PrismaClient } from "~/db.server"; import { prisma } from "~/db.server"; import { workerQueue } from "../worker.server"; -import { RuntimeEnvironmentType } from "@trigger.dev/database"; import { createHttpSourceRequest } from "~/utils/createHttpSourceRequest"; import { WebhookContextMetadata } from "@trigger.dev/core"; import { createHash } from "crypto"; +import { RuntimeEnvironmentType } from "~/database-types"; export class HandleWebhookRequestService { #prismaClient: PrismaClient; @@ -37,8 +37,6 @@ export class HandleWebhookRequestService { const lockId = webhookIdToLockId(webhookEnvironment.webhookId); await this.#prismaClient.$transaction(async (tx) => { - await tx.$executeRaw`SELECT pg_advisory_xact_lock(${lockId})`; - const counter = await tx.webhookDeliveryCounter.upsert({ where: { webhookId: webhookEnvironment.id }, update: { lastNumber: { increment: 1 } }, diff --git a/apps/webapp/app/services/worker.server.ts b/apps/webapp/app/services/worker.server.ts index 98fdd80b03..d592482f0d 100644 --- a/apps/webapp/app/services/worker.server.ts +++ b/apps/webapp/app/services/worker.server.ts @@ -37,6 +37,13 @@ import { TimeoutDeploymentService } from 
"~/v3/services/timeoutDeployment.server import { eventRepository } from "~/v3/eventRepository.server"; import { ExecuteTasksWaitingForDeployService } from "~/v3/services/executeTasksWaitingForDeploy"; import { TriggerScheduledTaskService } from "~/v3/services/triggerScheduledTask.server"; +import { PerformTaskAttemptAlertsService } from "~/v3/services/alerts/performTaskAttemptAlerts.server"; +import { DeliverAlertService } from "~/v3/services/alerts/deliverAlert.server"; +import { PerformDeploymentAlertsService } from "~/v3/services/alerts/performDeploymentAlerts.server"; +import { GraphileMigrationHelperService } from "./db/graphileMigrationHelper.server"; +import { PerformBulkActionService } from "~/v3/services/bulk/performBulkAction.server"; +import { CancelTaskRunService } from "~/v3/services/cancelTaskRun.server"; +import { ReplayTaskRunService } from "~/v3/services/replayTaskRun.server"; const workerCatalog = { indexEndpoint: z.object({ @@ -136,6 +143,21 @@ const workerCatalog = { "v3.triggerScheduledTask": z.object({ instanceId: z.string(), }), + "v3.performTaskAttemptAlerts": z.object({ + attemptId: z.string(), + }), + "v3.deliverAlert": z.object({ + alertId: z.string(), + }), + "v3.performDeploymentAlerts": z.object({ + deploymentId: z.string(), + }), + "v3.performBulkAction": z.object({ + bulkActionGroupId: z.string(), + }), + "v3.performBulkActionItem": z.object({ + bulkActionItemId: z.string(), + }), }; const executionWorkerCatalog = { @@ -199,9 +221,8 @@ if (env.NODE_ENV === "production") { } export async function init() { - // const pgNotify = new PgNotifyService(); - // await pgNotify.call("trigger:graphile:migrate", { latestMigration: 10 }); - // await new Promise((resolve) => setTimeout(resolve, 10000)) + const migrationHelper = new GraphileMigrationHelperService(); + await migrationHelper.call(); if (env.WORKER_ENABLED === "true") { await workerQueue.initialize(); @@ -220,11 +241,6 @@ function getWorkerQueue() { return new ZodWorker({ name: 
"workerQueue", prisma, - cleanup: { - frequencyExpression: "13,27,43 * * * *", - ttl: 7 * 24 * 60 * 60 * 1000, // 7 days - maxCount: 1000, - }, runnerOptions: { connectionString: env.DATABASE_URL, concurrency: env.WORKER_CONCURRENCY, @@ -238,16 +254,22 @@ function getWorkerQueue() { recurringTasks: { // Run this every 5 minutes autoIndexProductionEndpoints: { - pattern: "*/5 * * * *", + match: "*/5 * * * *", handler: async (payload, job) => { const service = new RecurringEndpointIndexService(); await service.call(payload.ts); }, }, + scheduleImminentDeferredEvents: { + match: "*/10 * * * *", + handler: async (payload, job) => { + await DeliverScheduledEventService.scheduleImminentDeferredEvents(); + }, + }, // Run this every hour purgeOldIndexings: { - pattern: "0 * * * *", + match: "0 * * * *", handler: async (payload, job) => { // Delete indexings that are older than 7 days await prisma.endpointIndex.deleteMany({ @@ -261,7 +283,7 @@ function getWorkerQueue() { }, // Run this every hour at the 13 minute mark purgeOldTaskEvents: { - pattern: "47 * * * *", + match: "47 * * * *", handler: async (payload, job) => { await eventRepository.truncateEvents(); }, @@ -533,6 +555,51 @@ function getWorkerQueue() { return await service.call(payload.instanceId); }, }, + "v3.performTaskAttemptAlerts": { + priority: 0, + maxAttempts: 3, + handler: async (payload, job) => { + const service = new PerformTaskAttemptAlertsService(); + + return await service.call(payload.attemptId); + }, + }, + "v3.deliverAlert": { + priority: 0, + maxAttempts: 8, + handler: async (payload, job) => { + const service = new DeliverAlertService(); + + return await service.call(payload.alertId); + }, + }, + "v3.performDeploymentAlerts": { + priority: 0, + maxAttempts: 3, + handler: async (payload, job) => { + const service = new PerformDeploymentAlertsService(); + + return await service.call(payload.deploymentId); + }, + }, + "v3.performBulkAction": { + priority: 0, + maxAttempts: 3, + handler: async 
(payload, job) => { + const service = new PerformBulkActionService(); + + return await service.call(payload.bulkActionGroupId); + }, + }, + "v3.performBulkActionItem": { + priority: 0, + maxAttempts: 3, + handler: async (payload, job) => { + const service = new PerformBulkActionService(); + + await service.performBulkActionItem(payload.bulkActionItemId); + }, + }, }, }); } diff --git a/apps/webapp/app/services/environmentSort.server.ts b/apps/webapp/app/utils/environmentSort.ts similarity index 94% rename from apps/webapp/app/services/environmentSort.server.ts rename to apps/webapp/app/utils/environmentSort.ts index a1e22d70f0..d1e0ac9c3a 100644 --- a/apps/webapp/app/services/environmentSort.server.ts +++ b/apps/webapp/app/utils/environmentSort.ts @@ -1,5 +1,4 @@ import { RuntimeEnvironmentType } from "@trigger.dev/database"; -import { logger } from "./logger.server"; const environmentSortOrder: RuntimeEnvironmentType[] = [ "DEVELOPMENT", diff --git a/apps/webapp/app/utils/formData.ts b/apps/webapp/app/utils/formData.ts deleted file mode 100644 index 8290365263..0000000000 --- a/apps/webapp/app/utils/formData.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { z } from "zod"; - -export function formDataAsObject( - formData: FormData, - schema: z.ZodSchema -): TValues { - const object = Object.fromEntries(formData.entries()); - const parsed = schema.parse(object); - return parsed; -} diff --git a/apps/webapp/app/utils/numberFormatter.ts b/apps/webapp/app/utils/numberFormatter.ts index 4056bfec04..0103e9263e 100644 --- a/apps/webapp/app/utils/numberFormatter.ts +++ b/apps/webapp/app/utils/numberFormatter.ts @@ -4,6 +4,12 @@ export const formatNumberCompact = (num: number): string => { return compactFormatter.format(num); }; +const formatter = Intl.NumberFormat("en"); + +export const formatNumber = (num: number): string => { + return formatter.format(num); +}; + const roundedCurrencyFormatter = Intl.NumberFormat("en-US", { style: "currency", currencyDisplay: "symbol", 
diff --git a/apps/webapp/app/utils/pathBuilder.ts b/apps/webapp/app/utils/pathBuilder.ts index 6e5b3fc993..9741ba8311 100644 --- a/apps/webapp/app/utils/pathBuilder.ts +++ b/apps/webapp/app/utils/pathBuilder.ts @@ -325,6 +325,21 @@ export function v3NewEnvironmentVariablesPath(organization: OrgForPath, project: return `${v3EnvironmentVariablesPath(organization, project)}/new`; } +export function v3ProjectAlertsPath(organization: OrgForPath, project: ProjectForPath) { + return `${v3ProjectPath(organization, project)}/alerts`; +} + +export function v3NewProjectAlertPath(organization: OrgForPath, project: ProjectForPath) { + return `${v3ProjectAlertsPath(organization, project)}/new`; +} + +export function v3NewProjectAlertPathConnectToSlackPath( + organization: OrgForPath, + project: ProjectForPath +) { + return `${v3ProjectAlertsPath(organization, project)}/new/connect-to-slack`; +} + export function v3TestPath( organization: OrgForPath, project: ProjectForPath, diff --git a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts index d136838514..d26759ec2a 100644 --- a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts @@ -12,7 +12,7 @@ import { prisma } from "~/db.server"; import { createNewSession, disconnectSession } from "~/models/runtimeEnvironment.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { marqs } from "~/v3/marqs/index.server"; +import { marqs, sanitizeQueueName } from "~/v3/marqs/index.server"; import { EnvironmentVariablesRepository } from "../environmentVariables/environmentVariablesRepository.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { CancelAttemptService } from "../services/cancelAttempt.server"; @@ -90,6 +90,10 @@ export class DevQueueConsumer { return; } + if (this._backgroundWorkers.has(backgroundWorker.id)) { + 
return; + } + this._backgroundWorkers.set(backgroundWorker.id, backgroundWorker); logger.debug("Registered background worker", { backgroundWorker: backgroundWorker.id }); @@ -271,10 +275,10 @@ export class DevQueueConsumer { return; } + this._enabled = true; // Create the session await createNewSession(this.env, this._options.ipAddress ?? "unknown"); - this._enabled = true; this._perTraceCountdown = this._options.maximumItemsPerTrace; this._lastNewTrace = new Date(); this._taskFailures = 0; @@ -417,6 +421,7 @@ export class DevQueueConsumer { lockedAt: new Date(), lockedById: backgroundTask.id, status: "EXECUTING", + lockedToVersionId: backgroundWorker.id, }, include: { attempts: { @@ -448,11 +453,21 @@ export class DevQueueConsumer { const queue = await prisma.taskQueue.findUnique({ where: { - runtimeEnvironmentId_name: { runtimeEnvironmentId: this.env.id, name: lockedTaskRun.queue }, + runtimeEnvironmentId_name: { + runtimeEnvironmentId: this.env.id, + name: sanitizeQueueName(lockedTaskRun.queue), + }, }, }); if (!queue) { + logger.debug("[DevQueueConsumer] Failed to find queue", { + queueName: lockedTaskRun.queue, + sanitizedName: sanitizeQueueName(lockedTaskRun.queue), + taskRun: lockedTaskRun.id, + messageId: message.messageId, + }); + await marqs?.nackMessage(message.messageId); setTimeout(() => this.#doWork(), 1000); return; diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index 399634ae97..b71b1176d0 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -21,11 +21,15 @@ import { z } from "zod"; import { prisma } from "~/db.server"; import { logger } from "~/services/logger.server"; import { singleton } from "~/utils/singleton"; -import { marqs } from "~/v3/marqs/index.server"; +import { marqs, sanitizeQueueName } from "~/v3/marqs/index.server"; import { EnvironmentVariablesRepository } from 
"../environmentVariables/environmentVariablesRepository.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { socketIo } from "../handleSocketIo.server"; -import { findCurrentWorkerDeployment } from "../models/workerDeployment.server"; +import { + findCurrentWorkerDeployment, + getWorkerDeploymentFromWorker, + getWorkerDeploymentFromWorkerTask, +} from "../models/workerDeployment.server"; import { RestoreCheckpointService } from "../services/restoreCheckpoint.server"; import { SEMINTATTRS_FORCE_RECORDING, tracer } from "../tracer.server"; import { CrashTaskRunService } from "../services/crashTaskRun.server"; @@ -299,7 +303,12 @@ export class SharedQueueConsumer { return; } - const deployment = await findCurrentWorkerDeployment(existingTaskRun.runtimeEnvironmentId); + // Check if the task run is locked to a specific worker, if not, use the current worker deployment + const deployment = existingTaskRun.lockedById + ? await getWorkerDeploymentFromWorkerTask(existingTaskRun.lockedById) + : existingTaskRun.lockedToVersionId + ? 
await getWorkerDeploymentFromWorker(existingTaskRun.lockedToVersionId) + : await findCurrentWorkerDeployment(existingTaskRun.runtimeEnvironmentId); if (!deployment || !deployment.worker) { logger.error("No matching deployment found for task run", { @@ -374,6 +383,7 @@ export class SharedQueueConsumer { data: { lockedAt: new Date(), lockedById: backgroundTask.id, + lockedToVersionId: deployment.worker.id, }, include: { runtimeEnvironment: true, @@ -408,7 +418,7 @@ export class SharedQueueConsumer { where: { runtimeEnvironmentId_name: { runtimeEnvironmentId: lockedTaskRun.runtimeEnvironmentId, - name: lockedTaskRun.queue, + name: sanitizeQueueName(lockedTaskRun.queue), }, }, }); @@ -635,12 +645,17 @@ export class SharedQueueConsumer { where: { runtimeEnvironmentId_name: { runtimeEnvironmentId: resumableAttempt.runtimeEnvironmentId, - name: resumableRun.queue, + name: sanitizeQueueName(resumableRun.queue), }, }, }); if (!queue) { + logger.debug("SharedQueueConsumer queue not found, so nacking message", { + queueName: sanitizeQueueName(resumableRun.queue), + attempt: resumableAttempt, + }); + await this.#nackAndDoMoreWork(message.messageId, this._options.nextTickInterval); return; } diff --git a/apps/webapp/app/v3/models/workerDeployment.server.ts b/apps/webapp/app/v3/models/workerDeployment.server.ts index e9e19c1b5e..211a20f5cd 100644 --- a/apps/webapp/app/v3/models/workerDeployment.server.ts +++ b/apps/webapp/app/v3/models/workerDeployment.server.ts @@ -1,16 +1,30 @@ import type { Prettify } from "@trigger.dev/core"; import { CURRENT_DEPLOYMENT_LABEL } from "~/consts"; -import { prisma } from "~/db.server"; +import { Prisma, prisma } from "~/db.server"; -export type CurrentWorkerDeployment = Prettify>>>; +export type CurrentWorkerDeployment = Prettify< + NonNullable>> +>; -export async function findCurrentWorkerDeployment(environmentId: string) { +type WorkerDeploymentWithWorkerTasks = Prisma.WorkerDeploymentGetPayload<{ + include: { + worker: { + include: { + 
tasks: true; + }; + }; + }; +}>; + +export async function findCurrentWorkerDeployment( + environmentId: string +): Promise { const promotion = await prisma.workerDeploymentPromotion.findUnique({ where: { environmentId_label: { environmentId, label: CURRENT_DEPLOYMENT_LABEL, - } + }, }, include: { deployment: { @@ -20,10 +34,64 @@ export async function findCurrentWorkerDeployment(environmentId: string) { tasks: true, }, }, - } - } - } + }, + }, + }, }); return promotion?.deployment; -} \ No newline at end of file +} + +export async function getWorkerDeploymentFromWorker( + workerId: string +): Promise { + const worker = await prisma.backgroundWorker.findUnique({ + where: { + id: workerId, + }, + include: { + deployment: true, + tasks: true, + }, + }); + + if (!worker?.deployment) { + return; + } + + const { deployment, ...workerWithoutDeployment } = worker; + + return { + ...deployment, + worker: workerWithoutDeployment, + }; +} + +export async function getWorkerDeploymentFromWorkerTask( + workerTaskId: string +): Promise { + const workerTask = await prisma.backgroundWorkerTask.findUnique({ + where: { + id: workerTaskId, + }, + include: { + worker: { + include: { + deployment: true, + tasks: true, + }, + }, + }, + }); + + if (!workerTask?.worker.deployment) { + return; + } + + const { deployment, ...workerWithoutDeployment } = workerTask.worker; + + return { + ...deployment, + worker: workerWithoutDeployment, + }; +} diff --git a/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts new file mode 100644 index 0000000000..aca1fdd152 --- /dev/null +++ b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts @@ -0,0 +1,114 @@ +import { + ProjectAlertChannel, + ProjectAlertType, + RuntimeEnvironmentType, +} from "@trigger.dev/database"; +import { nanoid } from "nanoid"; +import { env } from "~/env.server"; +import { findProjectByRef } from "~/models/project.server"; +import { 
encryptSecret } from "~/services/secrets/secretStore.server"; +import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; +import { BaseService, ServiceValidationError } from "../baseService.server"; + +export type CreateAlertChannelOptions = { + name: string; + alertTypes: ProjectAlertType[]; + environmentTypes: RuntimeEnvironmentType[]; + deduplicationKey?: string; + channel: + | { + type: "EMAIL"; + email: string; + } + | { + type: "WEBHOOK"; + url: string; + secret?: string; + } + | { + type: "SLACK"; + channelId: string; + channelName: string; + integrationId: string | undefined; + }; +}; + +export class CreateAlertChannelService extends BaseService { + public async call( + projectRef: string, + userId: string, + options: CreateAlertChannelOptions + ): Promise { + const project = await findProjectByRef(projectRef, userId); + + if (!project) { + throw new ServiceValidationError("Project not found"); + } + + const environmentTypes = + options.environmentTypes.length === 0 + ? (["STAGING", "PRODUCTION"] satisfies RuntimeEnvironmentType[]) + : options.environmentTypes; + + const existingAlertChannel = options.deduplicationKey + ? 
await this._prisma.projectAlertChannel.findUnique({ + where: { + projectId_deduplicationKey: { + projectId: project.id, + deduplicationKey: options.deduplicationKey, + }, + }, + }) + : undefined; + + if (existingAlertChannel) { + return await this._prisma.projectAlertChannel.update({ + where: { id: existingAlertChannel.id }, + data: { + name: options.name, + alertTypes: options.alertTypes, + type: options.channel.type, + properties: await this.#createProperties(options.channel), + environmentTypes, + }, + }); + } + + const alertChannel = await this._prisma.projectAlertChannel.create({ + data: { + friendlyId: generateFriendlyId("alert_channel"), + name: options.name, + alertTypes: options.alertTypes, + projectId: project.id, + type: options.channel.type, + properties: await this.#createProperties(options.channel), + enabled: true, + deduplicationKey: options.deduplicationKey, + userProvidedDeduplicationKey: options.deduplicationKey ? true : false, + environmentTypes, + }, + }); + + return alertChannel; + } + + async #createProperties(channel: CreateAlertChannelOptions["channel"]) { + switch (channel.type) { + case "EMAIL": + return { + email: channel.email, + }; + case "WEBHOOK": + return { + url: channel.url, + secret: await encryptSecret(env.ENCRYPTION_KEY, channel.secret ?? 
nanoid()), + }; + case "SLACK": + return { + channelId: channel.channelId, + channelName: channel.channelName, + integrationId: channel.integrationId, + }; + } + } +} diff --git a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts new file mode 100644 index 0000000000..2e72696c01 --- /dev/null +++ b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts @@ -0,0 +1,831 @@ +import { + ChatPostMessageArguments, + ErrorCode, + WebAPIHTTPError, + WebAPIPlatformError, + WebAPIRateLimitedError, + WebAPIRequestError, +} from "@slack/web-api"; +import { TaskRunError, createJsonErrorObject } from "@trigger.dev/core/v3"; +import assertNever from "assert-never"; +import { subtle } from "crypto"; +import { Prisma, PrismaClientOrTransaction, prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { + OrgIntegrationRepository, + OrganizationIntegrationForService, +} from "~/models/orgIntegration.server"; +import { + ProjectAlertEmailProperties, + ProjectAlertSlackProperties, + ProjectAlertSlackStorage, + ProjectAlertWebhookProperties, +} from "~/models/projectAlert.server"; +import { DeploymentPresenter } from "~/presenters/v3/DeploymentPresenter.server"; +import { sendAlertEmail } from "~/services/email.server"; +import { logger } from "~/services/logger.server"; +import { decryptSecret } from "~/services/secrets/secretStore.server"; +import { workerQueue } from "~/services/worker.server"; +import { BaseService } from "../baseService.server"; + +type FoundAlert = Prisma.Result< + typeof prisma.projectAlert, + { + include: { + channel: true; + project: { + include: { + organization: true; + }; + }; + environment: true; + taskRunAttempt: { + include: { + taskRun: true; + backgroundWorkerTask: true; + backgroundWorker: true; + }; + }; + workerDeployment: { + include: { + worker: { + include: { + tasks: true; + }; + }; + }; + }; + }; + }, + "findUniqueOrThrow" +>; + +export class 
DeliverAlertService extends BaseService { + public async call(alertId: string) { + const alert = await this._prisma.projectAlert.findUnique({ + where: { id: alertId }, + include: { + channel: true, + project: { + include: { + organization: true, + }, + }, + environment: true, + taskRunAttempt: { + include: { + taskRun: true, + backgroundWorkerTask: true, + backgroundWorker: true, + }, + }, + workerDeployment: { + include: { + worker: { + include: { + tasks: true, + }, + }, + }, + }, + }, + }); + + if (!alert) { + return; + } + + if (alert.status !== "PENDING") { + return; + } + + switch (alert.channel.type) { + case "EMAIL": { + await this.#sendEmail(alert); + break; + } + case "SLACK": { + await this.#sendSlack(alert); + break; + } + case "WEBHOOK": { + await this.#sendWebhook(alert); + break; + } + default: { + assertNever(alert.channel.type); + } + } + + await this._prisma.projectAlert.update({ + where: { id: alertId }, + data: { + status: "SENT", + }, + }); + } + + async #sendEmail(alert: FoundAlert) { + const emailProperties = ProjectAlertEmailProperties.safeParse(alert.channel.properties); + + if (!emailProperties.success) { + logger.error("[DeliverAlert] Failed to parse email properties", { + issues: emailProperties.error.issues, + properties: alert.channel.properties, + }); + + return; + } + + switch (alert.type) { + case "TASK_RUN_ATTEMPT": { + if (alert.taskRunAttempt) { + const taskRunError = TaskRunError.safeParse(alert.taskRunAttempt.error); + + if (!taskRunError.success) { + logger.error("[DeliverAlert] Failed to parse task run error", { + issues: taskRunError.error.issues, + taskAttemptError: alert.taskRunAttempt.error, + }); + + return; + } + + await sendAlertEmail({ + email: "alert-attempt", + to: emailProperties.data.email, + taskIdentifier: alert.taskRunAttempt.taskRun.taskIdentifier, + fileName: alert.taskRunAttempt.backgroundWorkerTask.filePath, + exportName: alert.taskRunAttempt.backgroundWorkerTask.exportName, + version: 
alert.taskRunAttempt.backgroundWorker.version, + environment: alert.environment.slug, + error: createJsonErrorObject(taskRunError.data), + attemptLink: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/runs/${alert.taskRunAttempt.taskRun.friendlyId}`, + }); + } else { + logger.error("[DeliverAlert] Task run attempt not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_FAILURE": { + if (alert.workerDeployment) { + const preparedError = DeploymentPresenter.prepareErrorData( + alert.workerDeployment.errorData + ); + + if (!preparedError) { + logger.error("[DeliverAlert] Failed to prepare deployment error data", { + errorData: alert.workerDeployment.errorData, + }); + + return; + } + + await sendAlertEmail({ + email: "alert-deployment-failure", + to: emailProperties.data.email, + version: alert.workerDeployment.version, + environment: alert.environment.slug, + shortCode: alert.workerDeployment.shortCode, + failedAt: alert.workerDeployment.failedAt ?? new Date(), + error: preparedError, + deploymentLink: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/deployments/${alert.workerDeployment.shortCode}`, + }); + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_SUCCESS": { + if (alert.workerDeployment) { + await sendAlertEmail({ + email: "alert-deployment-success", + to: emailProperties.data.email, + version: alert.workerDeployment.version, + environment: alert.environment.slug, + shortCode: alert.workerDeployment.shortCode, + deployedAt: alert.workerDeployment.deployedAt ?? new Date(), + deploymentLink: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/deployments/${alert.workerDeployment.shortCode}`, + taskCount: alert.workerDeployment.worker?.tasks.length ?? 
0, + }); + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + } + + break; + } + default: { + assertNever(alert.type); + } + } + } + + async #sendWebhook(alert: FoundAlert) { + const webhookProperties = ProjectAlertWebhookProperties.safeParse(alert.channel.properties); + + if (!webhookProperties.success) { + logger.error("[DeliverAlert] Failed to parse webhook properties", { + issues: webhookProperties.error.issues, + properties: alert.channel.properties, + }); + + return; + } + + switch (alert.type) { + case "TASK_RUN_ATTEMPT": { + if (alert.taskRunAttempt) { + const taskRunError = TaskRunError.safeParse(alert.taskRunAttempt.error); + + if (!taskRunError.success) { + logger.error("[DeliverAlert] Failed to parse task run error", { + issues: taskRunError.error.issues, + taskAttemptError: alert.taskRunAttempt.error, + }); + + return; + } + + const error = createJsonErrorObject(taskRunError.data); + + const payload = { + task: { + id: alert.taskRunAttempt.taskRun.taskIdentifier, + filePath: alert.taskRunAttempt.backgroundWorkerTask.filePath, + exportName: alert.taskRunAttempt.backgroundWorkerTask.exportName, + }, + attempt: { + id: alert.taskRunAttempt.friendlyId, + number: alert.taskRunAttempt.number, + startedAt: alert.taskRunAttempt.startedAt, + status: alert.taskRunAttempt.status, + }, + run: { + id: alert.taskRunAttempt.taskRun.friendlyId, + isTest: alert.taskRunAttempt.taskRun.isTest, + createdAt: alert.taskRunAttempt.taskRun.createdAt, + idempotencyKey: alert.taskRunAttempt.taskRun.idempotencyKey, + }, + environment: { + id: alert.environment.id, + type: alert.environment.type, + slug: alert.environment.slug, + }, + organization: { + id: alert.project.organizationId, + slug: alert.project.organization.slug, + name: alert.project.organization.title, + }, + project: { + id: alert.project.id, + ref: alert.project.externalRef, + slug: alert.project.slug, + name: alert.project.name, + }, + error, + }; + + await 
this.#deliverWebhook(payload, webhookProperties.data); + } else { + logger.error("[DeliverAlert] Task run attempt not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_FAILURE": { + if (alert.workerDeployment) { + const preparedError = DeploymentPresenter.prepareErrorData( + alert.workerDeployment.errorData + ); + + if (!preparedError) { + logger.error("[DeliverAlert] Failed to prepare deployment error data", { + errorData: alert.workerDeployment.errorData, + }); + + return; + } + + const payload = { + deployment: { + id: alert.workerDeployment.friendlyId, + status: alert.workerDeployment.status, + version: alert.workerDeployment.version, + shortCode: alert.workerDeployment.shortCode, + failedAt: alert.workerDeployment.failedAt ?? new Date(), + }, + environment: { + id: alert.environment.id, + type: alert.environment.type, + slug: alert.environment.slug, + }, + organization: { + id: alert.project.organizationId, + slug: alert.project.organization.slug, + name: alert.project.organization.title, + }, + project: { + id: alert.project.id, + ref: alert.project.externalRef, + slug: alert.project.slug, + name: alert.project.name, + }, + error: preparedError, + }; + + await this.#deliverWebhook(payload, webhookProperties.data); + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_SUCCESS": { + if (alert.workerDeployment) { + const payload = { + deployment: { + id: alert.workerDeployment.friendlyId, + status: alert.workerDeployment.status, + version: alert.workerDeployment.version, + shortCode: alert.workerDeployment.shortCode, + deployedAt: alert.workerDeployment.deployedAt ?? new Date(), + }, + tasks: + alert.workerDeployment.worker?.tasks.map((task) => ({ + id: task.slug, + filePath: task.filePath, + exportName: task.exportName, + triggerSource: task.triggerSource, + })) ?? 
[], + environment: { + id: alert.environment.id, + type: alert.environment.type, + slug: alert.environment.slug, + }, + organization: { + id: alert.project.organizationId, + slug: alert.project.organization.slug, + name: alert.project.organization.title, + }, + project: { + id: alert.project.id, + ref: alert.project.externalRef, + slug: alert.project.slug, + name: alert.project.name, + }, + }; + + await this.#deliverWebhook(payload, webhookProperties.data); + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + } + + break; + } + default: { + assertNever(alert.type); + } + } + } + + async #sendSlack(alert: FoundAlert) { + const slackProperties = ProjectAlertSlackProperties.safeParse(alert.channel.properties); + + if (!slackProperties.success) { + logger.error("[DeliverAlert] Failed to parse slack properties", { + issues: slackProperties.error.issues, + properties: alert.channel.properties, + }); + + return; + } + + // Get the org integration + const integration = slackProperties.data.integrationId + ? await this._prisma.organizationIntegration.findUnique({ + where: { + id: slackProperties.data.integrationId, + organizationId: alert.project.organizationId, + }, + include: { + tokenReference: true, + }, + }) + : await this._prisma.organizationIntegration.findFirst({ + where: { + service: "SLACK", + organizationId: alert.project.organizationId, + }, + orderBy: { + createdAt: "desc", + }, + include: { + tokenReference: true, + }, + }); + + if (!integration) { + logger.error("[DeliverAlert] Slack integration not found", { + alert, + }); + + return; + } + + switch (alert.type) { + case "TASK_RUN_ATTEMPT": { + if (alert.taskRunAttempt) { + // Find existing storage by the run ID + const storage = await this._prisma.projectAlertStorage.findFirst({ + where: { + alertChannelId: alert.channel.id, + alertType: alert.type, + storageId: alert.taskRunAttempt.taskRunId, + }, + }); + + const storageData = storage + ? 
ProjectAlertSlackStorage.safeParse(storage.storageData) + : undefined; + + const thread_ts = + storageData && storageData.success ? storageData.data.message_ts : undefined; + + const taskRunError = TaskRunError.safeParse(alert.taskRunAttempt.error); + + if (!taskRunError.success) { + logger.error("[DeliverAlert] Failed to parse task run error", { + issues: taskRunError.error.issues, + taskAttemptError: alert.taskRunAttempt.error, + }); + + return; + } + + const error = createJsonErrorObject(taskRunError.data); + + const exportName = alert.taskRunAttempt.backgroundWorkerTask.exportName; + const version = alert.taskRunAttempt.backgroundWorker.version; + const environment = alert.environment.slug; + const taskIdentifier = alert.taskRunAttempt.backgroundWorkerTask.slug; + const timestamp = alert.taskRunAttempt.completedAt ?? new Date(); + const runId = alert.taskRunAttempt.taskRun.friendlyId; + const attemptNumber = alert.taskRunAttempt.number; + + const message = await this.#postSlackMessage(integration, { + thread_ts, + channel: slackProperties.data.channelId, + text: `Task error in ${alert.taskRunAttempt.backgroundWorkerTask.exportName} [${alert.taskRunAttempt.backgroundWorker.version}.${alert.environment.slug}]`, + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: `:rotating_light: Error in *${exportName}* __`, + }, + }, + { + type: "section", + text: { + type: "mrkdwn", + text: `\`\`\`${error.stackTrace ?? 
error.message}\`\`\``, + }, + }, + { + type: "context", + elements: [ + { + type: "mrkdwn", + text: `${runId}.${attemptNumber} | ${taskIdentifier} | ${version}.${environment} | ${alert.project.name}`, + }, + ], + }, + { + type: "divider", + }, + { + type: "actions", + elements: [ + { + type: "button", + text: { + type: "plain_text", + text: "Investigate", + }, + url: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/runs/${alert.taskRunAttempt.taskRun.friendlyId}`, + }, + ], + }, + ], + }); + + // Upsert the storage + if (message.ts) { + if (storage) { + await this._prisma.projectAlertStorage.update({ + where: { + id: storage.id, + }, + data: { + storageData: { + message_ts: message.ts, + }, + }, + }); + } else { + await this._prisma.projectAlertStorage.create({ + data: { + alertChannelId: alert.channel.id, + alertType: alert.type, + storageId: alert.taskRunAttempt.taskRunId, + storageData: { + message_ts: message.ts, + }, + projectId: alert.project.id, + }, + }); + } + } + } else { + logger.error("[DeliverAlert] Task run attempt not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_FAILURE": { + if (alert.workerDeployment) { + const preparedError = DeploymentPresenter.prepareErrorData( + alert.workerDeployment.errorData + ); + + if (!preparedError) { + logger.error("[DeliverAlert] Failed to prepare deployment error data", { + errorData: alert.workerDeployment.errorData, + }); + + return; + } + + const version = alert.workerDeployment.version; + const environment = alert.environment.slug; + const timestamp = alert.workerDeployment.failedAt ?? new Date(); + + await this.#postSlackMessage(integration, { + channel: slackProperties.data.channelId, + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: `:rotating_light: Deployment failed *${version}.${environment}* __`, + }, + }, + { + type: "section", + text: { + type: "mrkdwn", + text: `\`\`\`${preparedError.stack ?? 
preparedError.message}\`\`\``, + }, + }, + { + type: "context", + elements: [ + { + type: "mrkdwn", + text: `${alert.workerDeployment.shortCode} | ${version}.${environment} | ${alert.project.name}`, + }, + ], + }, + { + type: "actions", + elements: [ + { + type: "button", + text: { + type: "plain_text", + text: "View Deployment", + }, + url: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/deployments/${alert.workerDeployment.shortCode}`, + }, + ], + }, + ], + }); + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + } + + break; + } + case "DEPLOYMENT_SUCCESS": { + if (alert.workerDeployment) { + const version = alert.workerDeployment.version; + const environment = alert.environment.slug; + const numberOfTasks = alert.workerDeployment.worker?.tasks.length ?? 0; + const timestamp = alert.workerDeployment.deployedAt ?? new Date(); + + await this.#postSlackMessage(integration, { + channel: slackProperties.data.channelId, + text: `Deployment ${alert.workerDeployment.version} [${alert.environment.slug}] succeeded`, + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: `:rocket: Deployed *${version}.${environment}* successfully __`, + }, + }, + { + type: "context", + elements: [ + { + type: "mrkdwn", + text: `${numberOfTasks} tasks | ${alert.workerDeployment.shortCode} | ${version}.${environment} | ${alert.project.name}`, + }, + ], + }, + { + type: "actions", + elements: [ + { + type: "button", + text: { + type: "plain_text", + text: "View Deployment", + }, + url: `${env.APP_ORIGIN}/projects/v3/${alert.project.externalRef}/deployments/${alert.workerDeployment.shortCode}`, + }, + ], + }, + ], + }); + + return; + } else { + logger.error("[DeliverAlert] Worker deployment not found", { + alert, + }); + + return; + } + } + } + } + + async #deliverWebhook(payload: any, webhook: ProjectAlertWebhookProperties) { + const rawPayload = JSON.stringify(payload); + const hashPayload = Buffer.from(rawPayload, "utf-8"); + 
+ const secret = await decryptSecret(env.ENCRYPTION_KEY, webhook.secret); + + const hmacSecret = Buffer.from(secret, "utf-8"); + const key = await subtle.importKey( + "raw", + hmacSecret, + { name: "HMAC", hash: "SHA-256" }, + false, + ["sign"] + ); + const signature = await subtle.sign("HMAC", key, hashPayload); + const signatureHex = Buffer.from(signature).toString("hex"); + + // Send the webhook to the URL specified in webhook.url + const response = await fetch(webhook.url, { + method: "POST", + headers: { + "content-type": "application/json", + "x-trigger-signature-hmacsha256": signatureHex, + }, + body: rawPayload, + signal: AbortSignal.timeout(5000), + }); + + if (!response.ok) { + logger.error("[DeliverAlert] Failed to send alert webhook", { + status: response.status, + statusText: response.statusText, + url: webhook.url, + body: payload, + signature, + }); + + throw new Error(`Failed to send alert webhook to ${webhook.url}`); + } + } + + async #postSlackMessage( + integration: OrganizationIntegrationForService<"SLACK">, + message: ChatPostMessageArguments + ) { + const client = await OrgIntegrationRepository.getAuthenticatedClientForIntegration( + integration, + { forceBotToken: true } + ); + + try { + return await client.chat.postMessage(message); + } catch (error) { + if (isWebAPIRateLimitedError(error)) { + logger.error("[DeliverAlert] Slack rate limited", { + error, + message, + }); + + throw new Error("Slack rate limited"); + } + + if (isWebAPIHTTPError(error)) { + logger.error("[DeliverAlert] Slack HTTP error", { + error, + message, + }); + + throw new Error("Slack HTTP error"); + } + + if (isWebAPIRequestError(error)) { + logger.error("[DeliverAlert] Slack request error", { + error, + message, + }); + + throw new Error("Slack request error"); + } + + if (isWebAPIPlatformError(error)) { + logger.error("[DeliverAlert] Slack platform error", { + error, + message, + }); + + throw new Error("Slack platform error"); + } + + logger.error("[DeliverAlert] 
Failed to send slack message", { + error, + message, + }); + + throw error; + } + } + + static async enqueue( + alertId: string, + tx: PrismaClientOrTransaction, + options?: { runAt?: Date; queueName?: string } + ) { + return await workerQueue.enqueue( + "v3.deliverAlert", + { + alertId, + }, + { + tx, + runAt: options?.runAt, + jobKey: `deliverAlert:${alertId}`, + queueName: options?.queueName, + } + ); + } +} + +function isWebAPIPlatformError(error: unknown): error is WebAPIPlatformError { + return (error as WebAPIPlatformError).code === ErrorCode.PlatformError; +} + +function isWebAPIRequestError(error: unknown): error is WebAPIRequestError { + return (error as WebAPIRequestError).code === ErrorCode.RequestError; +} + +function isWebAPIHTTPError(error: unknown): error is WebAPIHTTPError { + return (error as WebAPIHTTPError).code === ErrorCode.HTTPError; +} + +function isWebAPIRateLimitedError(error: unknown): error is WebAPIRateLimitedError { + return (error as WebAPIRateLimitedError).code === ErrorCode.RateLimitedError; +} diff --git a/apps/webapp/app/v3/services/alerts/performDeploymentAlerts.server.ts b/apps/webapp/app/v3/services/alerts/performDeploymentAlerts.server.ts new file mode 100644 index 0000000000..8f8b945e6e --- /dev/null +++ b/apps/webapp/app/v3/services/alerts/performDeploymentAlerts.server.ts @@ -0,0 +1,80 @@ +import { ProjectAlertChannel, ProjectAlertType, WorkerDeployment } from "@trigger.dev/database"; +import { $transaction, PrismaClientOrTransaction } from "~/db.server"; +import { workerQueue } from "~/services/worker.server"; +import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; +import { BaseService } from "../baseService.server"; +import { DeliverAlertService } from "./deliverAlert.server"; + +export class PerformDeploymentAlertsService extends BaseService { + public async call(deploymentId: string) { + const deployment = await this._prisma.workerDeployment.findUnique({ + where: { id: deploymentId }, + include: { + 
environment: true, + }, + }); + + if (!deployment) { + return; + } + + const alertType = + deployment.status === "DEPLOYED" ? "DEPLOYMENT_SUCCESS" : "DEPLOYMENT_FAILURE"; + + // Find all the alert channels + const alertChannels = await this._prisma.projectAlertChannel.findMany({ + where: { + projectId: deployment.projectId, + alertTypes: { + has: alertType, + }, + environmentTypes: { + has: deployment.environment.type, + }, + enabled: true, + }, + }); + + for (const alertChannel of alertChannels) { + await this.#createAndSendAlert(alertChannel, deployment, alertType); + } + } + + async #createAndSendAlert( + alertChannel: ProjectAlertChannel, + deployment: WorkerDeployment, + alertType: ProjectAlertType + ) { + await $transaction(this._prisma, async (tx) => { + const alert = await this._prisma.projectAlert.create({ + data: { + friendlyId: generateFriendlyId("alert"), + channelId: alertChannel.id, + projectId: deployment.projectId, + environmentId: deployment.environmentId, + status: "PENDING", + type: alertType, + workerDeploymentId: deployment.id, + }, + }); + + await DeliverAlertService.enqueue(alert.id, tx, { + queueName: `alert-channel:${alertChannel.id}`, + }); + }); + } + + static async enqueue(deploymentId: string, tx: PrismaClientOrTransaction, runAt?: Date) { + return await workerQueue.enqueue( + "v3.performDeploymentAlerts", + { + deploymentId, + }, + { + tx, + runAt, + jobKey: `performDeploymentAlerts:${deploymentId}`, + } + ); + } +} diff --git a/apps/webapp/app/v3/services/alerts/performTaskAttemptAlerts.server.ts b/apps/webapp/app/v3/services/alerts/performTaskAttemptAlerts.server.ts new file mode 100644 index 0000000000..b0599c6137 --- /dev/null +++ b/apps/webapp/app/v3/services/alerts/performTaskAttemptAlerts.server.ts @@ -0,0 +1,81 @@ +import { Prisma, ProjectAlertChannel } from "@trigger.dev/database"; +import { $transaction, PrismaClientOrTransaction, prisma } from "~/db.server"; +import { workerQueue } from "~/services/worker.server"; +import { 
generateFriendlyId } from "~/v3/friendlyIdentifiers"; +import { BaseService } from "../baseService.server"; +import { DeliverAlertService } from "./deliverAlert.server"; + +type FoundTaskAttempt = Prisma.Result< + typeof prisma.taskRunAttempt, + { include: { taskRun: true; backgroundWorkerTask: true; runtimeEnvironment: true } }, + "findUniqueOrThrow" +>; + +export class PerformTaskAttemptAlertsService extends BaseService { + public async call(attemptId: string) { + const taskAttempt = await this._prisma.taskRunAttempt.findUnique({ + where: { id: attemptId }, + include: { + taskRun: true, + backgroundWorkerTask: true, + runtimeEnvironment: true, + }, + }); + + if (!taskAttempt) { + return; + } + + // Find all the alert channels + const alertChannels = await this._prisma.projectAlertChannel.findMany({ + where: { + projectId: taskAttempt.taskRun.projectId, + alertTypes: { + has: "TASK_RUN_ATTEMPT", + }, + environmentTypes: { + has: taskAttempt.runtimeEnvironment.type, + }, + enabled: true, + }, + }); + + for (const alertChannel of alertChannels) { + await this.#createAndSendAlert(alertChannel, taskAttempt); + } + } + + async #createAndSendAlert(alertChannel: ProjectAlertChannel, taskAttempt: FoundTaskAttempt) { + await $transaction(this._prisma, async (tx) => { + const alert = await this._prisma.projectAlert.create({ + data: { + friendlyId: generateFriendlyId("alert"), + channelId: alertChannel.id, + projectId: taskAttempt.taskRun.projectId, + environmentId: taskAttempt.runtimeEnvironmentId, + status: "PENDING", + type: "TASK_RUN_ATTEMPT", + taskRunAttemptId: taskAttempt.id, + }, + }); + + await DeliverAlertService.enqueue(alert.id, tx, { + queueName: `alert-channel:${alertChannel.id}`, + }); + }); + } + + static async enqueue(attemptId: string, tx: PrismaClientOrTransaction, runAt?: Date) { + return await workerQueue.enqueue( + "v3.performTaskAttemptAlerts", + { + attemptId, + }, + { + tx, + runAt, + jobKey: `performTaskAttemptAlerts:${attemptId}`, + } + ); + } +} 
diff --git a/apps/webapp/app/v3/services/bulk/createBulkAction.server.ts b/apps/webapp/app/v3/services/bulk/createBulkAction.server.ts new file mode 100644 index 0000000000..9662974716 --- /dev/null +++ b/apps/webapp/app/v3/services/bulk/createBulkAction.server.ts @@ -0,0 +1,60 @@ +import { BulkActionType } from "@trigger.dev/database"; +import { bulkActionVerb } from "~/components/runs/v3/BulkAction"; +import { logger } from "~/services/logger.server"; +import { generateFriendlyId } from "../../friendlyIdentifiers"; +import { BaseService } from "../baseService.server"; +import { PerformBulkActionService } from "./performBulkAction.server"; +import { BULK_ACTION_RUN_LIMIT } from "~/consts"; + +type BulkAction = { + projectId: string; + action: BulkActionType; + runIds: string[]; +}; + +export class CreateBulkActionService extends BaseService { + public async call({ projectId, action, runIds }: BulkAction) { + const group = await this._prisma.bulkActionGroup.create({ + data: { + friendlyId: generateFriendlyId("bulk"), + projectId, + type: action, + }, + }); + + //limit to the first X runs + const passedTooManyRuns = runIds.length > BULK_ACTION_RUN_LIMIT; + runIds = runIds.slice(0, BULK_ACTION_RUN_LIMIT); + + const items = await this._prisma.bulkActionItem.createMany({ + data: runIds.map((runId) => ({ + friendlyId: generateFriendlyId("bulkitem"), + type: action, + groupId: group.id, + sourceRunId: runId, + })), + }); + + logger.debug("Created bulk action group", { + groupId: group.id, + action, + runIds, + }); + + await PerformBulkActionService.enqueue(group.id, this._prisma); + + let message = bulkActionVerb(action); + if (passedTooManyRuns) { + message += ` the first ${BULK_ACTION_RUN_LIMIT} runs`; + } else { + message += ` ${runIds.length} runs`; + } + + return { + id: group.id, + friendlyId: group.friendlyId, + runCount: runIds.length, + message, + }; + } +} diff --git a/apps/webapp/app/v3/services/bulk/performBulkAction.server.ts 
b/apps/webapp/app/v3/services/bulk/performBulkAction.server.ts new file mode 100644 index 0000000000..26d97a1dbe --- /dev/null +++ b/apps/webapp/app/v3/services/bulk/performBulkAction.server.ts @@ -0,0 +1,126 @@ +import assertNever from "assert-never"; +import { PrismaClientOrTransaction } from "~/db.server"; +import { workerQueue } from "~/services/worker.server"; +import { BaseService } from "../baseService.server"; +import { CancelTaskRunService } from "../cancelTaskRun.server"; +import { ReplayTaskRunService } from "../replayTaskRun.server"; + +export class PerformBulkActionService extends BaseService { + public async performBulkActionItem(bulkActionItemId: string) { + const item = await this._prisma.bulkActionItem.findUnique({ + where: { id: bulkActionItemId }, + include: { + group: true, + sourceRun: true, + destinationRun: true, + }, + }); + + if (!item) { + return; + } + + if (item.status !== "PENDING") { + return; + } + + switch (item.group.type) { + case "REPLAY": { + const service = new ReplayTaskRunService(this._prisma); + const result = await service.call(item.sourceRun); + + await this._prisma.bulkActionItem.update({ + where: { id: item.id }, + data: { + destinationRunId: result?.id, + status: result ? "COMPLETED" : "FAILED", + error: result ? undefined : "Failed to replay task run", + }, + }); + + break; + } + case "CANCEL": { + const service = new CancelTaskRunService(this._prisma); + + const result = await service.call(item.sourceRun); + + await this._prisma.bulkActionItem.update({ + where: { id: item.id }, + data: { + destinationRunId: item.sourceRun.id, + status: result ? "COMPLETED" : "FAILED", + error: result ? 
undefined : "Task wasn't cancelable", + }, + }); + + break; + } + default: { + assertNever(item.group.type); + } + } + + const groupItems = await this._prisma.bulkActionItem.findMany({ + where: { groupId: item.groupId }, + select: { + status: true, + }, + }); + + const isGroupCompleted = groupItems.every((item) => item.status !== "PENDING"); + + if (isGroupCompleted) { + await this._prisma.bulkActionItem.update({ + where: { id: item.id }, + data: { + status: "COMPLETED", + }, + }); + } + } + + public async enqueueBulkActionItem(bulkActionItemId: string, groupId: string) { + await workerQueue.enqueue( + "v3.performBulkActionItem", + { + bulkActionItemId, + }, + { + jobKey: `performBulkActionItem:${bulkActionItemId}`, + queueName: `bulkActionItem:${groupId}`, + } + ); + } + + public async call(bulkActionGroupId: string) { + const actionGroup = await this._prisma.bulkActionGroup.findUnique({ + include: { + items: true, + }, + where: { id: bulkActionGroupId }, + }); + + if (!actionGroup) { + return; + } + + for (const item of actionGroup.items) { + await this.enqueueBulkActionItem(item.id, bulkActionGroupId); + } + } + + static async enqueue(bulkActionGroupId: string, tx: PrismaClientOrTransaction, runAt?: Date) { + return await workerQueue.enqueue( + "v3.performBulkAction", + { + bulkActionGroupId, + }, + { + tx, + runAt, + jobKey: `performBulkAction:${bulkActionGroupId}`, + } + ); + } +} diff --git a/apps/webapp/app/v3/services/cancelTaskRun.server.ts b/apps/webapp/app/v3/services/cancelTaskRun.server.ts index 6e86af3d46..72179a0dce 100644 --- a/apps/webapp/app/v3/services/cancelTaskRun.server.ts +++ b/apps/webapp/app/v3/services/cancelTaskRun.server.ts @@ -1,12 +1,12 @@ import { Prisma, TaskRun, TaskRunAttemptStatus, TaskRunStatus } from "@trigger.dev/database"; -import { eventRepository } from "../eventRepository.server"; +import assertNever from "assert-never"; +import { logger } from "~/services/logger.server"; import { marqs } from "~/v3/marqs/index.server"; 
+import { eventRepository } from "../eventRepository.server"; +import { socketIo } from "../handleSocketIo.server"; import { devPubSub } from "../marqs/devPubSub.server"; import { BaseService } from "./baseService.server"; -import { socketIo } from "../handleSocketIo.server"; import { CancelAttemptService } from "./cancelAttempt.server"; -import { logger } from "~/services/logger.server"; -import assertNever from "assert-never"; export const CANCELLABLE_STATUSES: Array = [ "PENDING", @@ -97,6 +97,10 @@ export class CancelTaskRunService extends BaseService { if (opts.cancelAttempts) { await this.#cancelPotentiallyRunningAttempts(cancelledTaskRun, cancelledTaskRun.attempts); } + + return { + id: cancelledTaskRun.id, + }; } async #cancelPotentiallyRunningAttempts(run: TaskRun, attempts: ExtendedTaskRunAttempt[]) { diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index ae16564ea5..d18e3f16b7 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -19,6 +19,7 @@ import { ResumeTaskRunDependenciesService } from "./resumeTaskRunDependencies.se import { MAX_TASK_RUN_ATTEMPTS } from "~/consts"; import { CreateCheckpointService } from "./createCheckpoint.server"; import { TaskRun } from "@trigger.dev/database"; +import { PerformTaskAttemptAlertsService } from "./alerts/performTaskAttemptAlerts.server"; type FoundAttempt = Awaited>; @@ -154,9 +155,13 @@ export class CompleteAttemptService extends BaseService { }, }); - if (completion.retry !== undefined && taskRunAttempt.number < MAX_TASK_RUN_ATTEMPTS) { - const environment = env ?? (await this.#getEnvironment(execution.environment.id)); + const environment = env ?? 
(await this.#getEnvironment(execution.environment.id)); + + if (environment.type !== "DEVELOPMENT") { + await PerformTaskAttemptAlertsService.enqueue(taskRunAttempt.id, this._prisma); + } + if (completion.retry !== undefined && taskRunAttempt.number < MAX_TASK_RUN_ATTEMPTS) { const retryAt = new Date(completion.retry.timestamp); // Retry the task run diff --git a/apps/webapp/app/v3/services/createBackgroundWorker.server.ts b/apps/webapp/app/v3/services/createBackgroundWorker.server.ts index 4137d7a16c..2deb2c5373 100644 --- a/apps/webapp/app/v3/services/createBackgroundWorker.server.ts +++ b/apps/webapp/app/v3/services/createBackgroundWorker.server.ts @@ -172,7 +172,7 @@ export async function createBackgroundTasks( }, }); - if (taskQueue.concurrencyLimit) { + if (typeof taskQueue.concurrencyLimit === "number") { await marqs?.updateQueueConcurrencyLimits( environment, taskQueue.name, diff --git a/apps/webapp/app/v3/services/createDeployedBackgroundWorker.server.ts b/apps/webapp/app/v3/services/createDeployedBackgroundWorker.server.ts index 9ef6333f33..920fbeb4c0 100644 --- a/apps/webapp/app/v3/services/createDeployedBackgroundWorker.server.ts +++ b/apps/webapp/app/v3/services/createDeployedBackgroundWorker.server.ts @@ -9,6 +9,7 @@ import { projectPubSub } from "./projectPubSub.server"; import { marqs } from "~/v3/marqs/index.server"; import { logger } from "~/services/logger.server"; import { ExecuteTasksWaitingForDeployService } from "./executeTasksWaitingForDeploy"; +import { PerformDeploymentAlertsService } from "./alerts/performDeploymentAlerts.server"; export class CreateDeployedBackgroundWorkerService extends BaseService { public async call( @@ -98,6 +99,7 @@ export class CreateDeployedBackgroundWorkerService extends BaseService { } await ExecuteTasksWaitingForDeployService.enqueue(backgroundWorker.id, this._prisma); + await PerformDeploymentAlertsService.enqueue(deployment.id, this._prisma); return backgroundWorker; }); diff --git 
a/apps/webapp/app/v3/services/createOrgIntegration.server.ts b/apps/webapp/app/v3/services/createOrgIntegration.server.ts new file mode 100644 index 0000000000..4db6581c9a --- /dev/null +++ b/apps/webapp/app/v3/services/createOrgIntegration.server.ts @@ -0,0 +1,35 @@ +import { OrganizationIntegration } from "@trigger.dev/database"; +import { BaseService } from "./baseService.server"; +import { WebClient } from "@slack/web-api"; +import { env } from "~/env.server"; +import { $transaction } from "~/db.server"; +import { getSecretStore } from "~/services/secrets/secretStore.server"; +import { generateFriendlyId } from "../friendlyIdentifiers"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; + +export class CreateOrgIntegrationService extends BaseService { + public async call( + userId: string, + orgId: string, + serviceName: string, + code: string + ): Promise { + // Get the org + const org = await this._prisma.organization.findUnique({ + where: { + id: orgId, + members: { + some: { + userId, + }, + }, + }, + }); + + if (!org) { + throw new Error("Organization not found"); + } + + return OrgIntegrationRepository.createOrgIntegration(serviceName, code, org); + } +} diff --git a/apps/webapp/app/v3/services/deploymentIndexFailed.server.ts b/apps/webapp/app/v3/services/deploymentIndexFailed.server.ts index 9fa5c15f87..0c3242688b 100644 --- a/apps/webapp/app/v3/services/deploymentIndexFailed.server.ts +++ b/apps/webapp/app/v3/services/deploymentIndexFailed.server.ts @@ -1,3 +1,4 @@ +import { PerformDeploymentAlertsService } from "./alerts/performDeploymentAlerts.server"; import { BaseService } from "./baseService.server"; export class DeploymentIndexFailed extends BaseService { @@ -22,6 +23,8 @@ export class DeploymentIndexFailed extends BaseService { }, }); + await PerformDeploymentAlertsService.enqueue(deployment.id, this._prisma); + return deployment; } } diff --git a/apps/webapp/app/v3/services/restoreCheckpoint.server.ts 
b/apps/webapp/app/v3/services/restoreCheckpoint.server.ts index 9e362f4d92..bca2713063 100644 --- a/apps/webapp/app/v3/services/restoreCheckpoint.server.ts +++ b/apps/webapp/app/v3/services/restoreCheckpoint.server.ts @@ -81,6 +81,22 @@ export class RestoreCheckpointService extends BaseService { return; } + const restoreEvent = await this._prisma.checkpointRestoreEvent.findFirst({ + where: { + checkpointId: checkpoint.id, + type: "RESTORE", + }, + }); + + if (restoreEvent) { + logger.error("Restore event already exists", { + checkpointId: checkpoint.id, + restoreEventId: restoreEvent.id, + }); + + return; + } + const eventService = new CreateCheckpointRestoreEventService(this._prisma); await eventService.restore({ checkpointId: checkpoint.id }); diff --git a/apps/webapp/app/v3/services/startDeploymentIndexing.server.ts b/apps/webapp/app/v3/services/startDeploymentIndexing.server.ts index 9a0156dba9..7d03147e7b 100644 --- a/apps/webapp/app/v3/services/startDeploymentIndexing.server.ts +++ b/apps/webapp/app/v3/services/startDeploymentIndexing.server.ts @@ -15,9 +15,10 @@ export class StartDeploymentIndexing extends BaseService { friendlyId: deploymentId, }, data: { - imageReference: registryProxy - ? registryProxy.rewriteImageReference(body.imageReference) - : body.imageReference, + imageReference: + registryProxy && body.selfHosted !== true + ? 
registryProxy.rewriteImageReference(body.imageReference) + : body.imageReference, status: "DEPLOYING", }, }); diff --git a/apps/webapp/app/v3/services/timeoutDeployment.server.ts b/apps/webapp/app/v3/services/timeoutDeployment.server.ts index d269a750f7..c9ed0b52b6 100644 --- a/apps/webapp/app/v3/services/timeoutDeployment.server.ts +++ b/apps/webapp/app/v3/services/timeoutDeployment.server.ts @@ -1,6 +1,7 @@ import { logger } from "~/services/logger.server"; import { BaseService } from "./baseService.server"; import { workerQueue } from "~/services/worker.server"; +import { PerformDeploymentAlertsService } from "./alerts/performDeploymentAlerts.server"; export class TimeoutDeploymentService extends BaseService { public async call(id: string, fromStatus: string, errorMessage: string) { @@ -32,6 +33,8 @@ export class TimeoutDeploymentService extends BaseService { errorData: { message: errorMessage, name: "TimeoutError" }, }, }); + + await PerformDeploymentAlertsService.enqueue(deployment.id, this._prisma); } static async enqueue( diff --git a/apps/webapp/app/v3/services/triggerTask.server.ts b/apps/webapp/app/v3/services/triggerTask.server.ts index ddacf1b1b9..b906d2f4ef 100644 --- a/apps/webapp/app/v3/services/triggerTask.server.ts +++ b/apps/webapp/app/v3/services/triggerTask.server.ts @@ -88,8 +88,6 @@ export class TriggerTaskService extends BaseService { const lockId = taskIdentifierToLockId(taskId); const run = await $transaction(this._prisma, async (tx) => { - await tx.$executeRaw`SELECT pg_advisory_xact_lock(${lockId})`; - const lockedToBackgroundWorker = body.options?.lockToVersion ? 
await tx.backgroundWorker.findUnique({ where: { diff --git a/apps/webapp/app/v3/tracer.server.ts b/apps/webapp/app/v3/tracer.server.ts index f95c894bf3..70acbce9f0 100644 --- a/apps/webapp/app/v3/tracer.server.ts +++ b/apps/webapp/app/v3/tracer.server.ts @@ -100,7 +100,7 @@ function getTracer() { if (env.INTERNAL_OTEL_TRACE_EXPORTER_URL) { const exporter = new OTLPTraceExporter({ url: env.INTERNAL_OTEL_TRACE_EXPORTER_URL, - timeoutMillis: 1000, + timeoutMillis: 10_000, headers: env.INTERNAL_OTEL_TRACE_EXPORTER_AUTH_HEADER_NAME && env.INTERNAL_OTEL_TRACE_EXPORTER_AUTH_HEADER_VALUE diff --git a/apps/webapp/package.json b/apps/webapp/package.json index c9c2df2923..fb6f8d3dbf 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -9,6 +9,7 @@ "build:remix": "remix build", "build:server": "esbuild --platform=node --format=cjs ./server.ts --outdir=build", "dev": "cross-env PORT=3030 remix dev -c \"node ./build/server.js\"", + "dev:worker": "cross-env NODE_PATH=../../node_modules/.pnpm/node_modules node ./build/server.js", "format": "prettier --write .", "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint .", "start": "cross-env NODE_ENV=production node --max-old-space-size=8192 ./build/server.js", @@ -30,6 +31,8 @@ "/public/build" ], "dependencies": { + "@ariakit/react": "^0.4.6", + "@ariakit/react-core": "^0.4.6", "@aws-sdk/client-sqs": "^3.445.0", "@codemirror/autocomplete": "^6.3.1", "@codemirror/commands": "^6.1.2", @@ -82,6 +85,7 @@ "@remix-run/serve": "2.1.0", "@remix-run/server-runtime": "2.1.0", "@remix-run/v1-meta": "^0.1.3", + "@slack/web-api": "^6.8.1", "@socket.io/redis-adapter": "^8.3.0", "@tabler/icons-react": "^2.39.0", "@tailwindcss/container-queries": "^0.1.1", @@ -113,7 +117,7 @@ "evt": "^2.4.13", "express": "^4.18.1", "framer-motion": "^10.12.11", - "graphile-worker": "^0.13.0", + "graphile-worker": "0.16.6", "highlight.run": "^7.3.4", "humanize-duration": "^3.27.3", "intl-parse-accept-language": "^1.0.0", @@ -124,14 
+128,16 @@ "lodash.omit": "^4.5.0", "lucide-react": "^0.229.0", "marked": "^4.0.18", + "match-sorter": "^6.3.4", "morgan": "^1.10.0", "nanoid": "^3.3.4", "non.geist": "^1.0.2", "ohash": "^1.1.3", "openai": "^4.33.1", + "parse-duration": "^1.1.0", "posthog-js": "^1.93.3", "posthog-node": "^3.1.3", - "prism-react-renderer": "^1.3.5", + "prism-react-renderer": "^2.3.1", "prismjs": "^1.29.0", "prom-client": "^15.1.0", "random-words": "^2.0.0", @@ -232,4 +238,4 @@ "engines": { "node": ">=16.0.0" } -} +} \ No newline at end of file diff --git a/apps/webapp/public/emails/logo-mono.png b/apps/webapp/public/emails/logo-mono.png index 7de735eb4e..04626d7b47 100644 Binary files a/apps/webapp/public/emails/logo-mono.png and b/apps/webapp/public/emails/logo-mono.png differ diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index c7fa6ab166..1e49504520 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -53,20 +53,3 @@ services: - app_network ports: - 6379:6379 - - otel-collector: - container_name: otel-collector - image: otel/opentelemetry-collector-contrib:latest - restart: always - command: ["--config", "/etc/otel-collector-config.yaml"] - volumes: - - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - ports: - - "55680:55680" - - "55681:55681" - - "4317:4317" # OTLP gRPC receiver - - "4318:4318" # OTLP http receiver - - "4319:4319" # OTLP internal http receiver - environment: - BASELIME_API_KEY: ${BASELIME_API_KEY} - BASELIME_SERVICE: ${BASELIME_SERVICE} diff --git a/docs/documentation/guides/deployment-setup.mdx b/docs/documentation/guides/deployment-setup.mdx index f669049b68..8789cc5921 100644 --- a/docs/documentation/guides/deployment-setup.mdx +++ b/docs/documentation/guides/deployment-setup.mdx @@ -16,7 +16,21 @@ export const client = new TriggerClient({ }); ``` -## 2. Set your environment variables on your server +## 2. 
Include your Jobs in `sideEffects` if needed + +If your `package.json` does not declare `sideEffects`, you can safely skip this step. Otherwise, include a glob pattern that matches your jobs so that they are not tree shaken away in your production bundle. + +```json +// package.json +{ + "name": "my-app", + "sideEffects": [ + "./src/jobs/**/*.ts" + ] +} +``` + +## 3. Set your environment variables on your server Ensure that your `TRIGGER_API_KEY` (or whatever you've set it to) environment variable is set on your server, to the correct value from the "Environments & API Keys" page in your Trigger.dev dashboard. @@ -31,11 +45,11 @@ The exact instructions will vary depending on where you deploy to. Here are the - [Cloudflare Workers](https://developers.cloudflare.com/workers/platform/environment-variables/) - [Netlify](https://docs.netlify.com/environment-variables/overview/) -## 3. Deploy your code as usual +## 4. Deploy your code as usual Your Job code lives in your codebase, so you can deploy it as you normally would. -## 4. Connect the Endpoint for the first time +## 5. Connect the Endpoint for the first time 1. 
Go to the "Environments & API Keys" page in your Trigger.dev dashboard ![Go to the Environments & API Keys page ](/images/environments-link.png) diff --git a/docs/images/v3/github-access-token.png b/docs/images/v3/github-access-token.png new file mode 100644 index 0000000000..04f6ee9bd5 Binary files /dev/null and b/docs/images/v3/github-access-token.png differ diff --git a/docs/images/v3/troubleshooting-alerts-blank.png b/docs/images/v3/troubleshooting-alerts-blank.png new file mode 100644 index 0000000000..e7f96a03a2 Binary files /dev/null and b/docs/images/v3/troubleshooting-alerts-blank.png differ diff --git a/docs/images/v3/troubleshooting-alerts-disable-delete.png b/docs/images/v3/troubleshooting-alerts-disable-delete.png new file mode 100644 index 0000000000..bf64de5516 Binary files /dev/null and b/docs/images/v3/troubleshooting-alerts-disable-delete.png differ diff --git a/docs/images/v3/troubleshooting-alerts-modal.png b/docs/images/v3/troubleshooting-alerts-modal.png new file mode 100644 index 0000000000..357185ff50 Binary files /dev/null and b/docs/images/v3/troubleshooting-alerts-modal.png differ diff --git a/docs/integrations/apis/supabase/management.mdx b/docs/integrations/apis/supabase/management.mdx index 286fc93e70..ceb27d7585 100644 --- a/docs/integrations/apis/supabase/management.mdx +++ b/docs/integrations/apis/supabase/management.mdx @@ -109,6 +109,9 @@ You'll have to do this for each Supabase project you want to use webhooks with. To use this feature, you'll first initialize a `db` instance, passing in your Supabase project [ID](https://supabase.com/dashboard/project/_/settings/api) (or URL): +Note: If you're using a custom domain, you still need to use this format to initialize the db object: +`https://<project-id>.supabase.co`. 
+ ```ts import { SupabaseManagement } from "@trigger.dev/supabase"; diff --git a/docs/mint.json b/docs/mint.json index ccdd24825d..13726189d6 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -1,14 +1,8 @@ { "$schema": "https://mintlify.com/schema.json", "name": "Trigger.dev", - "openapi": [ - "/openapi.yml", - "/v3-openapi.json" - ], - "versions": [ - "v3 (Developer Preview)", - "v2" - ], + "openapi": ["/openapi.yml", "/v3-openapi.json"], + "versions": ["v3 (Developer Preview)", "v2"], "logo": { "dark": "/logo/dark.png", "light": "/logo/light.png", @@ -48,16 +42,6 @@ "name": "Home" }, "tabs": [ - { - "name": "v3 Developer Preview", - "url": "https://trigger.dev/docs/v3", - "version": "v2" - }, - { - "name": "v2", - "url": "https://trigger.dev/docs/documentation", - "version": "v3 (Developer Preview)" - }, { "name": "Integrations", "url": "integrations", @@ -72,6 +56,16 @@ "name": "Examples", "url": "https://trigger.dev/apis", "version": "v2" + }, + { + "name": "v3 Developer Preview", + "url": "https://trigger.dev/docs/v3", + "version": "v2" + }, + { + "name": "v2", + "url": "https://trigger.dev/docs/documentation", + "version": "v3 (Developer Preview)" } ], "redirects": [ @@ -96,20 +90,12 @@ { "group": "", "version": "v3 (Developer Preview)", - "pages": [ - "v3/introduction" - ] + "pages": ["v3/introduction"] }, { "group": "Getting Started", "version": "v3 (Developer Preview)", - "pages": [ - "v3/quick-start", - "v3/upgrading-from-v2", - "v3/changelog", - "v3/feature-matrix", - "v3/limits-performance" - ] + "pages": ["v3/quick-start", "v3/upgrading-from-v2", "v3/changelog", "v3/feature-matrix"] }, { "group": "Fundamentals", @@ -121,12 +107,7 @@ "v3/apikeys", { "group": "Task types", - "pages": [ - "v3/tasks-regular", - "v3/tasks-scheduled", - "v3/tasks-zod", - "v3/tasks-webhooks" - ] + "pages": ["v3/tasks-regular", "v3/tasks-scheduled"] }, "v3/trigger-config" ] @@ -134,10 +115,7 @@ { "group": "Development", "version": "v3 (Developer Preview)", - "pages": [ - 
"v3/cli-dev", - "v3/run-tests" - ] + "pages": ["v3/cli-dev", "v3/run-tests"] }, { "group": "Deployment", @@ -148,9 +126,7 @@ "v3/github-actions", { "group": "Deployment integrations", - "pages": [ - "v3/vercel-integration" - ] + "pages": ["v3/vercel-integration"] } ] }, @@ -176,73 +152,13 @@ "v3/machines", "v3/idempotency", "v3/reattempting-replaying", - "v3/trigger-filters", - "v3/notifications", - "v3/rollbacks", - "v3/using-apis", - "v3/middleware", - "v3/automated-tests" - ] - }, - { - "group": "Dashboard", - "version": "v3 (Developer Preview)", - "pages": [ - "v3/dashboard-overview", - "v3/dashboard-runs", - "v3/dashboard-tests", - "v3/dashboard-environment-variables" + "v3/notifications" ] }, { "group": "API reference", "version": "v3 (Developer Preview)", "pages": [ - { - "group": "Functions", - "pages": [ - "v3/reference-task", - "v3/reference-cron-task", - "v3/reference-cron-dynamic", - "v3/reference-interval-task", - "v3/reference-interval-dynamic", - "v3/reference-zod-task", - "v3/reference-zod-catalog", - "v3/reference-task-trigger", - "v3/reference-task-trigger-and-wait", - "v3/reference-task-batch-trigger", - "v3/reference-task-batch-trigger-and-wait", - "v3/reference-wait-for", - "v3/reference-wait-until", - "v3/reference-wait-for-event", - "v3/reference-wait-for-request", - "v3/reference-retry-on-throw", - "v3/reference-retry-fetch", - "v3/reference-retry-intercept-fetch", - "v3/reference-notification-catalog", - "v3/reference-notify", - "v3/reference-queue" - ] - }, - { - "group": "Objects", - "pages": [ - "v3/reference-context" - ] - }, - { - "group": "CLI", - "pages": [ - "v3/reference-cli-init", - "v3/reference-cli-dev", - "v3/reference-cli-deploy", - "v3/reference-cli-login", - "v3/reference-cli-logout", - "v3/reference-cli-update", - "v3/reference-cli-build", - "v3/reference-cli-who-am-i" - ] - }, { "group": "Runs API", "pages": [ @@ -266,33 +182,24 @@ ] }, { - "group": "Architecture", + "group": "Open source", "version": "v3 (Developer 
Preview)", - "pages": [ - "v3/architecture-how-it-works", - "v3/architecture-multi-tenant-queue", - "v3/architecture-reliability" - ] + "pages": ["v3/github-repo", "v3/open-source-self-hosting", "v3/open-source-contributing"] }, { - "group": "Open source", + "group": "Troubleshooting", "version": "v3 (Developer Preview)", "pages": [ - "v3/github-repo", - "v3/open-source-self-hosting", - "v3/open-source-contributing" + "v3/troubleshooting-alerts", + "v3/troubleshooting-uptime-status", + "v3/troubleshooting-github-issues", + "v3/troubleshooting-github-discussions" ] }, { "group": "Help", "version": "v3 (Developer Preview)", - "pages": [ - "v3/help-faqs", - "v3/community", - "v3/help-email", - "v3/help-slack", - "v3/help-uptime-status" - ] + "pages": ["v3/community", "v3/help-slack", "v3/help-email"] }, { "group": "Getting Started", @@ -459,6 +366,7 @@ }, { "group": "Overview", + "version": "v2", "pages": [ "integrations/introduction", { @@ -479,13 +387,11 @@ }, { "group": "Integrations", + "version": "v2", "pages": [ { "group": "Airtable", - "pages": [ - "integrations/apis/airtable", - "integrations/apis/airtable-tasks" - ] + "pages": ["integrations/apis/airtable", "integrations/apis/airtable-tasks"] }, { "group": "GitHub", @@ -511,25 +417,16 @@ }, { "group": "Plain", - "pages": [ - "integrations/apis/plain", - "integrations/apis/plain-tasks" - ] + "pages": ["integrations/apis/plain", "integrations/apis/plain-tasks"] }, "integrations/apis/replicate", { "group": "SendGrid", - "pages": [ - "integrations/apis/sendgrid", - "integrations/apis/sendgrid-tasks" - ] + "pages": ["integrations/apis/sendgrid", "integrations/apis/sendgrid-tasks"] }, { "group": "Resend", - "pages": [ - "integrations/apis/resend", - "integrations/apis/resend-tasks" - ] + "pages": ["integrations/apis/resend", "integrations/apis/resend-tasks"] }, { "group": "Shopify", @@ -541,10 +438,7 @@ }, { "group": "Slack", - "pages": [ - "integrations/apis/slack", - "integrations/apis/slack-tasks" - ] + "pages": 
["integrations/apis/slack", "integrations/apis/slack-tasks"] }, "integrations/apis/stripe", { @@ -560,6 +454,7 @@ }, { "group": "SDK", + "version": "v2", "pages": [ "sdk/introduction", { @@ -569,9 +464,7 @@ "sdk/triggerclient/constructor", { "group": "Instance properties", - "pages": [ - "sdk/triggerclient/store" - ] + "pages": ["sdk/triggerclient/store"] }, { "group": "Instance methods", @@ -634,10 +527,7 @@ "sdk/dynamictrigger/constructor", { "group": "Instance methods", - "pages": [ - "sdk/dynamictrigger/register", - "sdk/dynamictrigger/unregister" - ] + "pages": ["sdk/dynamictrigger/register", "sdk/dynamictrigger/unregister"] } ] }, @@ -648,10 +538,7 @@ "sdk/dynamicschedule/constructor", { "group": "Instance methods", - "pages": [ - "sdk/dynamicschedule/register", - "sdk/dynamicschedule/unregister" - ] + "pages": ["sdk/dynamicschedule/register", "sdk/dynamicschedule/unregister"] } ] }, @@ -663,12 +550,12 @@ }, { "group": "HTTP Reference", - "pages": [ - "sdk/api-reference/events/create-an-event" - ] + "version": "v2", + "pages": ["sdk/api-reference/events/create-an-event"] }, { "group": "React SDK", + "version": "v2", "pages": [ "sdk/react/introduction", "sdk/react/triggerprovider", @@ -680,9 +567,7 @@ { "group": "Overview", "version": "v2", - "pages": [ - "examples/introduction" - ] + "pages": ["examples/introduction"] } ], "footerSocials": { @@ -690,4 +575,4 @@ "github": "https://github.com/triggerdotdev", "linkedin": "https://www.linkedin.com/company/triggerdotdev" } -} \ No newline at end of file +} diff --git a/docs/v3/architecture-how-it-works.mdx b/docs/v3/architecture-how-it-works.mdx deleted file mode 100644 index 728e167072..0000000000 --- a/docs/v3/architecture-how-it-works.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "Architecture: How it works" -sidebarTitle: "How it works" -description: "An overview of how Trigger.dev v3 works under the hood." 
---- - - diff --git a/docs/v3/architecture-multi-tenant-queue.mdx b/docs/v3/architecture-multi-tenant-queue.mdx deleted file mode 100644 index b775b401c4..0000000000 --- a/docs/v3/architecture-multi-tenant-queue.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "Architecture: Multi-tenant queue" -sidebarTitle: "Multi-tenant queue" -description: "We built a reliable and fair multi-tenant queue that controls triggering all tasks." ---- - - diff --git a/docs/v3/architecture-reliability.mdx b/docs/v3/architecture-reliability.mdx deleted file mode 100644 index 24bf1474af..0000000000 --- a/docs/v3/architecture-reliability.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "Architecture: Reliability" -sidebarTitle: "Reliability" -description: "How reliability is achieved with Trigger.dev." ---- - - diff --git a/docs/v3/automated-tests.mdx b/docs/v3/automated-tests.mdx deleted file mode 100644 index 8c6c6cee1b..0000000000 --- a/docs/v3/automated-tests.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Automated tests" -description: "Write automated tests of your tasks." 
---- - - diff --git a/docs/v3/community.mdx b/docs/v3/community.mdx index 08c6e8242d..7166c5b5be 100644 --- a/docs/v3/community.mdx +++ b/docs/v3/community.mdx @@ -1,5 +1,5 @@ --- -title: "Community" +title: "Discord Community" url: "https://trigger.dev/discord" --- diff --git a/docs/v3/feature-matrix.mdx b/docs/v3/feature-matrix.mdx index 709aca5982..119fbcd961 100644 --- a/docs/v3/feature-matrix.mdx +++ b/docs/v3/feature-matrix.mdx @@ -3,24 +3,23 @@ title: "Feature matrix" description: "What features are currently available in the Developer Preview" --- -| Feature | Description | Status | -| ----------------------------------------------------------------------------------- | -------------------------------------------------- | ------ | -| [Regular tasks](/v3/tasks-regular) | A task that can be triggered from anywhere | βœ… | -| [Triggering](/v3/triggering) | Triggering and batch triggering tasks | βœ… | -| [Testing from the dashboard](/v3/run-tests) | Test your tasks from the dashboard | βœ… | -| [Queues and concurrency controls](/v3/queue-concurrency) | Queues and concurrency controls | βœ… | -| [Per-tenant queuing](/v3/queue-concurrency#concurrency-keys-and-per-tenant-queuing) | Separate queues for each of your users | βœ… | -| [Reattempts and retrying](/v3/errors-retrying) | Write reliable tasks using retries | βœ… | -| [Atomic versioning](/v3/versioning) | Each deploy creates a new version | βœ… | -| [Deploy via CLI](/v3/cli-deploy) | Deploy from the command line | βœ… | -| [Deploy via GitHub Actions](/v3/github-actions) | Deploy using GitHub Actions | βœ… | -| [Scheduled tasks](/v3/tasks-scheduled) | A task that can be triggered on a schedule | βœ… | -| [Zod tasks](/v3/tasks-zod) | Define tasks using Zod schemas | ⏳ | -| [Webhook tasks](/v3/tasks-webhooks) | A task that can be triggered by a webhook | ⏳ | -| Full text search of runs | Find a run by searching the payload and output | ⏳ | -| Logs view with search | All logs view with filtering and full text 
search | ⏳ | -| Alerts | Add alerts in the UI for errors and queue backlogs | ⏳ | -| Notifications | Send data to your web app from a run | ⏳ | -| Rollbacks | Easily rollback changes when errors happen | ⏳ | +| Feature | Description | Status | +| ----------------------------------------------------------------------------------- | ------------------------------------------------- | ------ | +| [Regular tasks](/v3/tasks-regular) | A task that can be triggered from anywhere | βœ… | +| [Triggering](/v3/triggering) | Triggering and batch triggering tasks | βœ… | +| [Testing from the dashboard](/v3/run-tests) | Test your tasks from the dashboard | βœ… | +| [Queues and concurrency controls](/v3/queue-concurrency) | Queues and concurrency controls | βœ… | +| [Per-tenant queuing](/v3/queue-concurrency#concurrency-keys-and-per-tenant-queuing) | Separate queues for each of your users | βœ… | +| [Reattempts and retrying](/v3/errors-retrying) | Write reliable tasks using retries | βœ… | +| [Atomic versioning](/v3/versioning) | Each deploy creates a new version | βœ… | +| [Deploy via CLI](/v3/cli-deploy) | Deploy from the command line | βœ… | +| [Deploy via GitHub Actions](/v3/github-actions) | Deploy using GitHub Actions | βœ… | +| Alerts | Add alerts in the UI for errors and deploys | βœ… | +| [Scheduled tasks](/v3/tasks-scheduled) | A task that can be triggered on a schedule | βœ… | +| [Zod tasks](/v3/tasks-zod) | Define tasks using Zod schemas | ⏳ | +| Full text search of runs | Find a run by searching the payload and output | ⏳ | +| Logs view with search | All logs view with filtering and full text search | ⏳ | +| Notifications | Send data to your web app from a run | ⏳ | +| Rollbacks | Easily rollback changes when errors happen | ⏳ | [Let us know](https://trigger.dev/discord) what we should prioritize and what we are missing. 
diff --git a/docs/v3/github-actions.mdx b/docs/v3/github-actions.mdx index 15ea3f81c0..9ba5eeaed2 100644 --- a/docs/v3/github-actions.mdx +++ b/docs/v3/github-actions.mdx @@ -5,7 +5,10 @@ description: "You can easily deploy your tasks with GitHub actions." This simple GitHub action file will deploy you Trigger.dev tasks when new code is pushed to the `main` branch and the `trigger` directory has changes in it. -The deploy step will fail if any version mismatches are detected. Please see the [version pinning](/v3/github-actions#version-pinning) section for more details. + + The deploy step will fail if any version mismatches are detected. Please see the [version + pinning](/v3/github-actions#version-pinning) section for more details. + @@ -30,7 +33,7 @@ jobs: uses: actions/setup-node@v4 with: node-version: "20.x" - + - name: Install dependencies run: npm install @@ -41,7 +44,6 @@ jobs: npx trigger.dev@beta deploy ``` - ```yaml .github/workflows/release-trigger-staging.yml name: Deploy to Trigger.dev (staging) @@ -60,7 +62,7 @@ jobs: uses: actions/setup-node@v4 with: node-version: "20.x" - + - name: Install dependencies run: npm install @@ -70,6 +72,7 @@ jobs: run: | npx trigger.dev@beta deploy --env staging ``` + If you already have a GitHub action file, you can just add the final step "πŸš€ Deploy Trigger.dev" to your existing file. @@ -78,6 +81,17 @@ You need to add the `TRIGGER_ACCESS_TOKEN` secret to your repository. You can cr To set it in GitHub go to your repository, click on "Settings", "Secrets and variables" and then "Actions". Add a new secret with the name `TRIGGER_ACCESS_TOKEN` and use the value of your access token. + + 1. Go to your repository on GitHub. + 2. Click on "Settings". + 3. Click on "Secrets and variables" -> "Actions" + 4. Click on "New repository secret". + 5. Add the name `TRIGGER_ACCESS_TOKEN` and the value of your access token. 
+ +![Add TRIGGER_ACCESS_TOKEN in GitHub](/images/v3/github-access-token.png) + + + ## Version pinning The CLI and `@trigger.dev/*` package versions need to be in sync, otherwise there will be errors and unpredictable behavior. Hence, the `deploy` command will automatically fail during CI on any version mismatches. diff --git a/docs/v3/help-faqs.mdx b/docs/v3/help-faqs.mdx deleted file mode 100644 index 27a327d3c5..0000000000 --- a/docs/v3/help-faqs.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Frequently Asked Questions" -sidebarTitle: "FAQs" ---- - - diff --git a/docs/v3/help-slack.mdx b/docs/v3/help-slack.mdx index 9edad7a029..6388cb66b3 100644 --- a/docs/v3/help-slack.mdx +++ b/docs/v3/help-slack.mdx @@ -1,5 +1,5 @@ --- -title: "Slack" +title: "Slack support" --- If you have a paid Trigger.dev account, you can request a private Slack Connect channel. diff --git a/docs/v3/help-uptime-status.mdx b/docs/v3/help-uptime-status.mdx deleted file mode 100644 index 9774c655b7..0000000000 --- a/docs/v3/help-uptime-status.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Uptime status" -url: "https://trigger.openstatus.dev/" ---- - -View the [current Trigger.dev Cloud system status](https://trigger.openstatus.dev/). diff --git a/docs/v3/limits-performance.mdx b/docs/v3/limits-performance.mdx deleted file mode 100644 index cf985e9572..0000000000 --- a/docs/v3/limits-performance.mdx +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: "Limits & Performance" ---- - - diff --git a/docs/v3/middleware.mdx b/docs/v3/middleware.mdx deleted file mode 100644 index f1dee48ac6..0000000000 --- a/docs/v3/middleware.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Middleware" -description: "This function is called before the `run` function, it allows you to wrap the run function with custom code." 
---- - - diff --git a/docs/v3/reference-cli-build.mdx b/docs/v3/reference-cli-build.mdx deleted file mode 100644 index 03876fecde..0000000000 --- a/docs/v3/reference-cli-build.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev build" -sidebarTitle: "build" -description: "This command will build your tasks." ---- - - diff --git a/docs/v3/reference-cli-deploy.mdx b/docs/v3/reference-cli-deploy.mdx deleted file mode 100644 index d9fd782c49..0000000000 --- a/docs/v3/reference-cli-deploy.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev deploy" -sidebarTitle: "deploy" -description: "This command will deploy your tasks." ---- - - diff --git a/docs/v3/reference-cli-dev.mdx b/docs/v3/reference-cli-dev.mdx deleted file mode 100644 index fc822b6ddc..0000000000 --- a/docs/v3/reference-cli-dev.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev dev" -sidebarTitle: "dev" -description: "This command runs your tasks locally." ---- - - diff --git a/docs/v3/reference-cli-init.mdx b/docs/v3/reference-cli-init.mdx deleted file mode 100644 index 65fc1840f6..0000000000 --- a/docs/v3/reference-cli-init.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev init" -sidebarTitle: "init" -description: "This command will setup your v3 project." ---- - - diff --git a/docs/v3/reference-cli-login.mdx b/docs/v3/reference-cli-login.mdx deleted file mode 100644 index d8c24f97c3..0000000000 --- a/docs/v3/reference-cli-login.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev login" -sidebarTitle: "login" -description: "This command will log you in to the CLI. Required to run any other command." ---- - - diff --git a/docs/v3/reference-cli-logout.mdx b/docs/v3/reference-cli-logout.mdx deleted file mode 100644 index 1ca1453f97..0000000000 --- a/docs/v3/reference-cli-logout.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev logout" -sidebarTitle: "logout" -description: "This command will log you out of the CLI." 
---- - - diff --git a/docs/v3/reference-cli-update.mdx b/docs/v3/reference-cli-update.mdx deleted file mode 100644 index f526a77a0b..0000000000 --- a/docs/v3/reference-cli-update.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev update" -sidebarTitle: "update" -description: "This command can be used to update all of your trigger.dev packages to the latest versions." ---- - - diff --git a/docs/v3/reference-cli-who-am-i.mdx b/docs/v3/reference-cli-who-am-i.mdx deleted file mode 100644 index c23acbdc91..0000000000 --- a/docs/v3/reference-cli-who-am-i.mdx +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "trigger.dev whoami" -sidebarTitle: "whoami" -description: "This command will return information about you, the logged in user." ---- - - diff --git a/docs/v3/reference-context.mdx b/docs/v3/reference-context.mdx deleted file mode 100644 index 9b869e2700..0000000000 --- a/docs/v3/reference-context.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Context" -description: "The Context object is part of the `run` function parameter and provides information about the current run." ---- - - diff --git a/docs/v3/reference-cron-dynamic.mdx b/docs/v3/reference-cron-dynamic.mdx deleted file mode 100644 index e104b91655..0000000000 --- a/docs/v3/reference-cron-dynamic.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "cron.dynamic()" -description: "Trigger a task with many different CRON schedules. For example you can use this to let your users select when they want a reminder." ---- - - diff --git a/docs/v3/reference-cron-task.mdx b/docs/v3/reference-cron-task.mdx deleted file mode 100644 index c8b8ef9f5d..0000000000 --- a/docs/v3/reference-cron-task.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "cron.task()" -description: "Trigger a task on a recurring schedule using a CRON expression." 
---- - - diff --git a/docs/v3/reference-interval-dynamic.mdx b/docs/v3/reference-interval-dynamic.mdx deleted file mode 100644 index 140189daba..0000000000 --- a/docs/v3/reference-interval-dynamic.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "interval.dynamic()" -description: "Trigger a task with many different interval schedules. For example you can use this to let your users select how often they want a reminder." ---- - - diff --git a/docs/v3/reference-interval-task.mdx b/docs/v3/reference-interval-task.mdx deleted file mode 100644 index edbb0496af..0000000000 --- a/docs/v3/reference-interval-task.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "interval.task()" -description: "Trigger a task on a recurring schedule using the time interval you want between runs." ---- - - diff --git a/docs/v3/reference-notification-catalog.mdx b/docs/v3/reference-notification-catalog.mdx deleted file mode 100644 index 258d937370..0000000000 --- a/docs/v3/reference-notification-catalog.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "notification.catalog()" -description: "Create a set of events that can be emitted from your tasks. These can be subscribed to from your application to provide real-time updates to your users." ---- - - diff --git a/docs/v3/reference-notify.mdx b/docs/v3/reference-notify.mdx deleted file mode 100644 index b905af74b9..0000000000 --- a/docs/v3/reference-notify.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "notify()" -description: "Send a notification from your tasks. These can be subscribed to from your application to provide real-time updates to your users." ---- - - diff --git a/docs/v3/reference-queue.mdx b/docs/v3/reference-queue.mdx deleted file mode 100644 index f6a565690e..0000000000 --- a/docs/v3/reference-queue.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "queue()" -description: "Create queue settings that can be used when triggering a task." 
---- - - diff --git a/docs/v3/reference-retry-fetch.mdx b/docs/v3/reference-retry-fetch.mdx deleted file mode 100644 index ede4b00db6..0000000000 --- a/docs/v3/reference-retry-fetch.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "retry.fetch()" -description: "Inside a task, do a fetch request that will retry (you can specify the retry conditions)." ---- - - diff --git a/docs/v3/reference-retry-intercept-fetch.mdx b/docs/v3/reference-retry-intercept-fetch.mdx deleted file mode 100644 index 68ced1ba79..0000000000 --- a/docs/v3/reference-retry-intercept-fetch.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "retry.interceptFetch()" -description: "Useful when writing automated tests – it will intercept matching HTTP requests and respond with what you provide." ---- - - diff --git a/docs/v3/reference-retry-on-throw.mdx b/docs/v3/reference-retry-on-throw.mdx deleted file mode 100644 index 4dd2f36c45..0000000000 --- a/docs/v3/reference-retry-on-throw.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "retry.onThrow()" -description: "Inside a task, retry the wrapped code if it throws an error." ---- - - diff --git a/docs/v3/reference-task-batch-trigger-and-wait.mdx b/docs/v3/reference-task-batch-trigger-and-wait.mdx deleted file mode 100644 index e8402ba8e5..0000000000 --- a/docs/v3/reference-task-batch-trigger-and-wait.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "task.batchTriggerAndWait()" -description: "Trigger a task many times at once from inside another task, and wait for all the results." ---- - - diff --git a/docs/v3/reference-task-batch-trigger.mdx b/docs/v3/reference-task-batch-trigger.mdx deleted file mode 100644 index 209ac4701b..0000000000 --- a/docs/v3/reference-task-batch-trigger.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "task.batchTrigger()" -description: "Trigger a task many times at once from your code." 
---- - - diff --git a/docs/v3/reference-task-trigger-and-wait.mdx b/docs/v3/reference-task-trigger-and-wait.mdx deleted file mode 100644 index ec98fda17c..0000000000 --- a/docs/v3/reference-task-trigger-and-wait.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "task.triggerAndWait()" -description: "Trigger a task from inside another task, and wait for the result." ---- - - diff --git a/docs/v3/reference-task-trigger.mdx b/docs/v3/reference-task-trigger.mdx deleted file mode 100644 index 09fe9b944c..0000000000 --- a/docs/v3/reference-task-trigger.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "task.trigger()" -description: "Trigger a task from your code." ---- - - diff --git a/docs/v3/reference-task.mdx b/docs/v3/reference-task.mdx deleted file mode 100644 index e88b32fb6f..0000000000 --- a/docs/v3/reference-task.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "task()" -description: "The task() function is the simplest way to create a long-running task." ---- - - diff --git a/docs/v3/reference-wait-for-event.mdx b/docs/v3/reference-wait-for-event.mdx deleted file mode 100644 index d2123c54d2..0000000000 --- a/docs/v3/reference-wait-for-event.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "wait.forEvent()" -description: "Inside a task, wait until a specific event is received before continuing." ---- - - diff --git a/docs/v3/reference-wait-for-request.mdx b/docs/v3/reference-wait-for-request.mdx deleted file mode 100644 index 63df356363..0000000000 --- a/docs/v3/reference-wait-for-request.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "wait.forRequest()" -description: "Inside a task, wait until a specific request is received before continuing." ---- - - diff --git a/docs/v3/reference-wait-for.mdx b/docs/v3/reference-wait-for.mdx deleted file mode 100644 index a0ea793c9f..0000000000 --- a/docs/v3/reference-wait-for.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "wait.for()" -description: "Inside a task, wait for a period of time before continuing." 
---- - - diff --git a/docs/v3/reference-wait-until.mdx b/docs/v3/reference-wait-until.mdx deleted file mode 100644 index fda6673cc2..0000000000 --- a/docs/v3/reference-wait-until.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "wait.until()" -description: "Inside a task, wait until the specified date before continuing." ---- - - diff --git a/docs/v3/reference-zod-catalog.mdx b/docs/v3/reference-zod-catalog.mdx deleted file mode 100644 index 17350b192a..0000000000 --- a/docs/v3/reference-zod-catalog.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "zod.catalog()" -description: "Create a set of events with names and payloads that are parsed using Zod schemas." ---- - - diff --git a/docs/v3/reference-zod-task.mdx b/docs/v3/reference-zod-task.mdx deleted file mode 100644 index 48aca50204..0000000000 --- a/docs/v3/reference-zod-task.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "zod.task()" -description: "A task where the payload is parsed using a zod schema." ---- - - diff --git a/docs/v3/rollbacks.mdx b/docs/v3/rollbacks.mdx deleted file mode 100644 index 94600a7fce..0000000000 --- a/docs/v3/rollbacks.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Rollbacks" -description: "You can rollback changes when errors happen, to give transactional guarantees to your operations." ---- - - diff --git a/docs/v3/tasks-overview.mdx b/docs/v3/tasks-overview.mdx index 09ec23ab75..24f09531a6 100644 --- a/docs/v3/tasks-overview.mdx +++ b/docs/v3/tasks-overview.mdx @@ -59,7 +59,7 @@ This is used to identify your task so it can be triggered, managed, and you can Your custom code inside `run()` will be executed when your task is triggered. It’s an async function that has two arguments: 1. The run payload - the data that you pass to the task when you trigger it. -2. An object with `ctx` about the run ([Context](/v3/reference-context)), and any output from the optional `init` function that runs before every run attempt. +2. 
An object with `ctx` about the run (Context), and any output from the optional `init` function that runs before every run attempt. Anything you return from the `run` function will be the result of the task. Data you return must be JSON serializable: strings, numbers, booleans, arrays, objects, and null. @@ -85,7 +85,7 @@ export const taskWithRetries = task({ }); ``` -For more information read [the retrying guide](/v3/retrying), or see the [SDK reference](/v3/reference-task). +For more information read [the retrying guide](/v3/retrying). It's also worth mentioning that you can [retry a block of code](/v3/retrying) inside your tasks as well. diff --git a/docs/v3/tasks-regular.mdx b/docs/v3/tasks-regular.mdx index c9fb4d233d..6f48a0fa3b 100644 --- a/docs/v3/tasks-regular.mdx +++ b/docs/v3/tasks-regular.mdx @@ -19,7 +19,7 @@ Sometimes OpenAI calls can take a long time to complete, or they can fail. This This example uses Resend to send a sequence of emails over several days. -Each email is wrapped in [retry.onThrow](/v3/reference-retry-on-throw). This will retry the block of code if an error is thrown. This is useful when you don't want to retry the whole task, but just a part of it. The entire task will use the default retrying, so can also retry. +Each email is wrapped in `retry.onThrow`. This will retry the block of code if an error is thrown. This is useful when you don't want to retry the whole task, but just a part of it. The entire task will use the default retrying, so can also retry. Additionally this task uses `wait.for` to wait for a certain amount of time before sending the next email. During the waiting time, the task will be paused and will not consume any resources. 
diff --git a/docs/v3/tasks-webhooks.mdx b/docs/v3/tasks-webhooks.mdx deleted file mode 100644 index acd20108f1..0000000000 --- a/docs/v3/tasks-webhooks.mdx +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: "Webhook tasks" -description: "A task that is triggered when a webhook is received from an API." ---- - -## Built-in webhooks triggers - - - -## How to manually use Trigger.dev with webhooks - - diff --git a/docs/v3/tasks-zod.mdx b/docs/v3/tasks-zod.mdx deleted file mode 100644 index 0c80b68773..0000000000 --- a/docs/v3/tasks-zod.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Zod tasks" -description: "You can use Zod to define a catalog of events and then attach those events to your tasks." ---- - - diff --git a/docs/v3/trigger-filters.mdx b/docs/v3/trigger-filters.mdx deleted file mode 100644 index 42ceb71a37..0000000000 --- a/docs/v3/trigger-filters.mdx +++ /dev/null @@ -1,6 +0,0 @@ ---- -title: "Trigger filters" -description: "You can add filters to your tasks so they're only triggered when certain conditions are met." ---- - - diff --git a/docs/v3/troubleshooting-alerts.mdx b/docs/v3/troubleshooting-alerts.mdx new file mode 100644 index 0000000000..d7cc4635aa --- /dev/null +++ b/docs/v3/troubleshooting-alerts.mdx @@ -0,0 +1,29 @@ +--- +title: "Alerts" +description: "Get alerted when runs or deployments fail, or when deployments succeed." +--- + + + + +Click on "Alerts" in the left hand side menu, then click on "New alert" to open the new alert modal. + ![Email alerts](/images/v3/troubleshooting-alerts-blank.png) + + + +Choose to be notified by email, Slack notification or webhook whenever: + +- a run fails +- a deployment fails +- a deployment succeeds + + ![Email alerts](/images/v3/troubleshooting-alerts-modal.png) + + + +Click on the triple dot menu on the right side of the table row and select "Disable" or "Delete". 
+ + ![Disable and delete alerts](/images/v3/troubleshooting-alerts-disable-delete.png) + + + diff --git a/docs/v3/troubleshooting-github-discussions.mdx b/docs/v3/troubleshooting-github-discussions.mdx new file mode 100644 index 0000000000..0b82ec0b55 --- /dev/null +++ b/docs/v3/troubleshooting-github-discussions.mdx @@ -0,0 +1,6 @@ +--- +title: "GitHub Discussions" +url: "https://github.com/triggerdotdev/trigger.dev/discussions" +--- + +Please [join our GitHub Discussions](https://github.com/triggerdotdev/trigger.dev/discussions) to ask questions, share your projects, and get help from other developers. diff --git a/docs/v3/troubleshooting-github-issues.mdx b/docs/v3/troubleshooting-github-issues.mdx new file mode 100644 index 0000000000..51052d4d53 --- /dev/null +++ b/docs/v3/troubleshooting-github-issues.mdx @@ -0,0 +1,6 @@ +--- +title: "GitHub Issues" +url: "https://github.com/triggerdotdev/trigger.dev/issues" +--- + +Please [open an issue on GitHub](https://github.com/triggerdotdev/trigger.dev/issues) to report bugs or request features. diff --git a/docs/v3/troubleshooting-uptime-status.mdx b/docs/v3/troubleshooting-uptime-status.mdx new file mode 100644 index 0000000000..0890887f5b --- /dev/null +++ b/docs/v3/troubleshooting-uptime-status.mdx @@ -0,0 +1,6 @@ +--- +title: "Uptime Status" +--- + +Get email notifications when Trigger.dev creates, updates or resolves a platform incident. +[Subscribe](https://status.trigger.dev/) diff --git a/docs/v3/using-apis.mdx b/docs/v3/using-apis.mdx deleted file mode 100644 index f298e1c25a..0000000000 --- a/docs/v3/using-apis.mdx +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: "Using APIs" -description: "You can use any Node.js library inside the run function, or do HTTP requests." 
---- - -## Using Node.js SDKs - - - -## Using fetch or axios - - - -## Using webhooks - - diff --git a/experiments/context.ts b/experiments/context.ts new file mode 100644 index 0000000000..3b723f8739 --- /dev/null +++ b/experiments/context.ts @@ -0,0 +1,604 @@ +import { z } from "zod"; +import * as v from "valibot"; +import { wrap } from "@typeschema/valibot"; + +export type ParserZodEsque = { + _input: TInput; + _output: TParsedInput; +}; + +export type ParserMyZodEsque = { + parse: (input: any) => TInput; +}; + +export type ParserSuperstructEsque = { + create: (input: unknown) => TInput; +}; + +export type ParserCustomValidatorEsque = (input: unknown) => Promise | TInput; + +export type ParserYupEsque = { + validateSync: (input: unknown) => TInput; +}; + +export type ParserScaleEsque = { + assert(value: unknown): asserts value is TInput; +}; + +export type ParserWithoutInput = + | ParserCustomValidatorEsque + | ParserMyZodEsque + | ParserScaleEsque + | ParserSuperstructEsque + | ParserYupEsque; + +export type ParserWithInputOutput = ParserZodEsque; + +export type Parser = ParserWithInputOutput | ParserWithoutInput; + +export type inferParser = TParser extends ParserWithInputOutput< + infer $TIn, + infer $TOut +> + ? { + in: $TIn; + out: $TOut; + } + : TParser extends ParserWithoutInput + ? { + in: $InOut; + out: $InOut; + } + : never; + +export type Simplify = TType extends any[] | Date ? TType : { [K in keyof TType]: TType[K] }; + +export type TriggerResult = { + id: string; +}; + +export type TaskRunResult = + | { + ok: true; + id: string; + output: TOutput; + } + | { + ok: false; + id: string; + error: unknown; + }; + +export type RunMetadata = { + run: string; +}; + +export type inferContext = + TContextBuilder extends ContextBuilder + ? TContext extends UnsetMarker + ? unknown + : TContextOverrides extends UnsetMarker + ? 
Simplify + : Simplify> + : never; + +export type RunFnParams = { + /** Metadata about the task, run, attempt, queue, environment, organization, project and batch. */ + meta: RunMetadata; + + /** Context added by task middleware */ + ctx: inferContext; + + payload: TPayload; +}; + +/** + * See https://github.com/microsoft/TypeScript/issues/41966#issuecomment-758187996 + * Fixes issues with iterating over keys of objects with index signatures. + * Without this, iterations over keys of objects with index signatures will lose + * type information about the keys and only the index signature will remain. + * @internal + */ +export type WithoutIndexSignature = { + [K in keyof TObj as string extends K ? never : number extends K ? never : K]: TObj[K]; +}; + +/** + * @internal + * Overwrite properties in `TType` with properties in `TWith` + * Only overwrites properties when the type to be overwritten + * is an object. Otherwise it will just use the type from `TWith`. + */ +export type Overwrite = TWith extends any + ? TType extends object + ? { + [K in + | keyof WithoutIndexSignature + | keyof WithoutIndexSignature]: K extends keyof TWith // Exclude index signature from keys + ? TWith[K] + : K extends keyof TType + ? TType[K] + : never; + } & (string extends keyof TWith // Handle cases with an index signature + ? { [key: string]: TWith[string] } + : number extends keyof TWith + ? { [key: number]: TWith[number] } + : // eslint-disable-next-line @typescript-eslint/ban-types + {}) + : TWith + : never; + +/** @internal */ +export const contextMiddlewareMarker = "contextMiddlewareMarker" as "contextMiddlewareMarker" & { + __brand: "contextMiddlewareMarker"; +}; +type ContextMiddlewareMarker = typeof contextMiddlewareMarker; + +interface ContextMiddlewareResultBase { + /** + * All middlewares should pass through their `next()`'s output. + * Requiring this marker makes sure that can't be forgotten at compile-time. 
+ */ + readonly marker: ContextMiddlewareMarker; +} + +interface ContextMiddlewareOKResult<_TContextOverride> extends ContextMiddlewareResultBase { + ok: true; + data: unknown; +} + +interface ContextMiddlewareErrorResult<_TContextOverride> extends ContextMiddlewareResultBase { + ok: false; + error: Error; // should be our error +} + +/** + * @internal + */ +export type ContextMiddlewareResult<_TContextOverride> = + | ContextMiddlewareErrorResult<_TContextOverride> + | ContextMiddlewareOKResult<_TContextOverride>; + +export type ContextMiddlewareFunction = { + (opts: { + ctx: Simplify>; + meta: RunMetadata; + next: { + (): Promise>; + <$ContextOverride>(ctx: $ContextOverride): Promise>; + }; + }): Promise>; +}; + +export const unsetMarker = Symbol("unsetMarker"); +export type UnsetMarker = typeof unsetMarker; + +export interface ContextBuilder { + use<$ContextOverridesOut>( + fn: ContextMiddlewareFunction + ): ContextBuilder>; +} + +export type AnyContextBuilder = ContextBuilder; + +export function createContext( + initialContext?: TContext +): ContextBuilder { + const builder: AnyContextBuilder = { + use(middlewareFn) { + return {} as AnyContextBuilder; + }, + }; + + return builder; +} + +const contextBuilder = createContext({ foo: "bar" }); +const context = contextBuilder + .use((opts) => { + return opts.next({ + baz: "whatever", + }); + }) + .use((opts) => { + return opts.next({ + db: { + find: async (id: string) => { + return "hello"; + }, + }, + }); + }); + +type ctx = inferContext; + +const contextBuilder2 = createContext(); + +type ctx2 = inferContext; + +const contextBuilder3 = createContext({ bar: "baz" }); + +type ctx3 = inferContext; + +const contextBuilder4 = createContext().use((opts) => { + return opts.next({ + hello: "world", + }); +}); + +type ctx4 = inferContext; + +export type TaskOptions< + TOutput, + TContext extends AnyContextBuilder, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +> = { + /** An id for your 
task. This must be unique inside your project and not change between versions. */ + id: TIdentifier; + + schema?: TParser; + + context?: TContext; + + /** This gets called when a task is triggered. It's where you put the code you want to execute. + * + * @param payload - The payload that is passed to your task when it's triggered. This must be JSON serializable. + * @param params - Metadata about the run. + */ + run: (params: Simplify, TContext>>) => Promise; +}; + +export interface Task< + TOutput, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +> { + /** + * The id of the task. + */ + id: TIdentifier; + /** + * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. + * @param payload + * @param options + * @returns TriggerResult + * - `id` - The id of the triggered task run. + */ + trigger: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise; + + /** + * Trigger a task with the given payload, and wait for the result. Returns the result of the task run + * @param payload + * @param options - Options for the task run + * @returns TaskRunResult + * @example + * ``` + * const result = await task.triggerAndWait({ foo: "bar" }); + * + * if (result.ok) { + * console.log(result.output); + * } else { + * console.error(result.error); + * } + * ``` + */ + triggerAndWait: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise>; +} + +export type AnyTask = Task; + +type inferParserIn = TParser extends Parser + ? inferParser["in"] + : TDefault; +type inferParserOut = TParser extends Parser + ? inferParser["out"] + : TDefault; + +export type TaskPayloadIn = TTask extends Task + ? inferParserIn + : never; + +export type TaskPayloadOut = TTask extends Task + ? inferParserOut + : never; + +export type TaskOutput = TTask extends Task + ? 
TOutput + : never; + +export type TaskIdentifier = TTask extends Task + ? TIdentifier + : never; + +export type TaskTypes = TTask extends Task< + infer TOutput, + infer TIdentifier, + infer TParser +> + ? { + id: TIdentifier; + payloadIn: inferParserIn; + payloadOut: inferParserOut; + output: TOutput; + } + : never; + +export type TriggerTaskOptions = { + idempotencyKey?: string; + maxAttempts?: number; + startAt?: Date; + startAfter?: number; + concurrencyKey?: string; +}; + +export type Prettify = { + [K in keyof T]: T[K]; +} & {}; + +export function task< + TOutput, + TContext extends AnyContextBuilder, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +>( + options: TaskOptions +): Task { + return createTask(options); +} + +export function createTask< + TOutput, + TContext extends AnyContextBuilder, + TIndentifier extends string, + TParser extends Parser | undefined = undefined, +>( + params: TaskOptions +): Task { + const task: Task = { + id: params.id, + trigger: async (payload, options) => { + return { + id: "run_1234", + }; + }, + triggerAndWait: async (payload, options) => { + const output = await params.run({ + meta: { run: "run_1234" }, + payload: payload as unknown as inferParserOut, // Actually do the parsing + ctx: {} as inferContext, + }); + + return { + ok: true, + id: "run_1234", + output, + }; + }, + }; + + return task; +} + +export interface TaskLibraryRecord { + [key: string]: AnyTask | TaskLibraryRecord; +} + +export interface TaskLibrary { + _def: { record: TRecord }; +} + +export type AnyTaskLibrary = TaskLibrary; + +export type CreateTaskLibraryOptions = { + [key: string]: AnyTask | AnyTaskLibrary | CreateTaskLibraryOptions; +}; + +export type DecorateCreateTaskLibraryOptions = + { + [K in keyof TTaskLibraryOptions]: TTaskLibraryOptions[K] extends infer $Value + ? $Value extends AnyTask + ? $Value + : $Value extends TaskLibrary + ? TRecord + : $Value extends CreateTaskLibraryOptions + ? 
DecorateCreateTaskLibraryOptions<$Value> + : never + : never; + }; + +function taskLibrary( + input: TInput +): TaskLibrary>; +function taskLibrary(input: TInput): TaskLibrary; +function taskLibrary(input: TaskLibraryRecord | CreateTaskLibraryOptions) { + // TODO: reserved words + + return { + _def: { + record: input, + }, + }; +} + +// ======== client side +type DecorateTask = { + trigger: (id: TaskIdentifier, payload: TaskPayloadIn) => Promise<{ id: string }>; +}; + +type DecoratedTaskLibraryRecord< + TTaskLibrary extends AnyTaskLibrary, + TRecord extends TaskLibraryRecord, +> = { + [TKey in keyof TRecord]: TRecord[TKey] extends infer $Value + ? $Value extends TaskLibraryRecord + ? DecoratedTaskLibraryRecord + : $Value extends AnyTask + ? DecorateTask<$Value> + : never + : never; +}; + +export type inferTaskLibraryClient = + DecoratedTaskLibraryRecord; + +export type CreateTriggerClient = { + lib: inferTaskLibraryClient; + runs: { + retrieve: (id: string) => Promise<{ status: boolean }>; + }; +}; + +export type CreateTriggerClientOptions = { + secretKey?: string; +}; + +export function createTriggerClient( + options?: CreateTriggerClientOptions +): CreateTriggerClient { + return {} as CreateTriggerClient; +} + +// trigger/my-tasks.ts +const taskOne = task({ + id: "task-1", + run: async () => { + const handle = await taskTwo.trigger({ url: "https://trigger.dev" }); + const result = await taskTwo.triggerAndWait({ url: "https://trigger.dev" }); + + return "foo-bar"; + }, +}); + +const taskTwo = task({ + id: "task-2", + async run(params) { + return { + hello: "world", + payload: params.payload.other, + }; + }, +}); + +const userTaskOne = task({ + id: "user/task-1", + context: contextBuilder4, + run: async (params) => { + return "foo-bar"; + }, +}); + +const userTaskTwo = task({ + id: "user/task-2", + context: contextBuilder3, + run: async (params) => { + return "foo-bar"; + }, +}); + +const zodTaskOne = task({ + id: "zod/task-1", + context: contextBuilder, + schema: 
z.object({ foo: z.string() }), + run: async (params) => {}, +}); + +const zodTaskTwo = task({ + id: "zod/task-2", + schema: z.object({ foo: z.string(), isAdmin: z.boolean().default(false) }), + context: contextBuilder2, + run: async (params) => { + console.log(params.payload.foo, params.meta.run); + }, +}); + +const valibotTaskOne = task({ + id: "valibot/task-1", + schema: wrap( + v.object({ + foo: v.string(), + }) + ), + run: async (params) => { + await zodTaskOne.trigger({ foo: "bar" }); + await zodTaskTwo.trigger({ foo: "bar" }); + + await valibotTaskTwo.trigger({ foo: "bar" }); + }, +}); + +const valibotTaskTwo = task({ + id: "valibot/task-2", + schema: wrap( + v.object({ + foo: v.string(), + isAdmin: v.optional(v.boolean(), true), + }) + ), + run: async (params) => { + await valibotTaskOne.trigger({ foo: "bar" }); + }, +}); + +// in trigger/lib.ts +const myTaskLibrary = taskLibrary({ + myTasks: { taskOne, taskTwo }, +}); + +const userTaskLibrary = taskLibrary({ + userTaskOne, + userTaskTwo, +}); + +const zodTaskLibrary = taskLibrary({ + zodTaskOne, + zodTaskTwo, +}); + +const valibotTaskLibrary = taskLibrary({ + valibotTaskOne, + valibotTaskTwo, +}); + +export const library = taskLibrary({ + foo: myTaskLibrary, + bar: userTaskLibrary, + zod: zodTaskLibrary, + valibot: valibotTaskLibrary, +}); + +// Export the library type +export type Library = typeof library; + +// Now on the client +const client = createTriggerClient({ + secretKey: "tr_dev_1234", +}); + +client.runs.retrieve("run_12343"); // Call regular API client calls + +// Tasks are now available under lib +client.lib.foo.myTasks.taskOne.trigger("task-1", { hello: "world" }); +client.lib.bar.userTaskOne.trigger("user/task-1", { userId: "user_123" }); +client.lib.bar.userTaskTwo.trigger("user/task-2", { + userId: "user_123", + isAdmin: true, +}); +client.lib.bar.userTaskTwo.trigger("user/task-2", { + userId: "user_123", + isAdmin: false, +}); +client.lib.zod.zodTaskOne.trigger("zod/task-1", { foo: "bar" 
}); +client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar" }); +client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar", isAdmin: false }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { foo: "bar" }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { + foo: "bar", + isAdmin: true, +}); diff --git a/experiments/with-generic-parser.ts b/experiments/with-generic-parser.ts new file mode 100644 index 0000000000..19f50a176a --- /dev/null +++ b/experiments/with-generic-parser.ts @@ -0,0 +1,444 @@ +import { z } from "zod"; +import * as v from "valibot"; +import { wrap } from "@typeschema/valibot"; + +export type ParserZodEsque = { + _input: TInput; + _output: TParsedInput; +}; + +export type ParserMyZodEsque = { + parse: (input: any) => TInput; +}; + +export type ParserSuperstructEsque = { + create: (input: unknown) => TInput; +}; + +export type ParserCustomValidatorEsque = (input: unknown) => Promise | TInput; + +export type ParserYupEsque = { + validateSync: (input: unknown) => TInput; +}; + +export type ParserScaleEsque = { + assert(value: unknown): asserts value is TInput; +}; + +export type ParserWithoutInput = + | ParserCustomValidatorEsque + | ParserMyZodEsque + | ParserScaleEsque + | ParserSuperstructEsque + | ParserYupEsque; + +export type ParserWithInputOutput = ParserZodEsque; + +export type Parser = ParserWithInputOutput | ParserWithoutInput; + +export type inferParser = TParser extends ParserWithInputOutput< + infer $TIn, + infer $TOut +> + ? { + in: $TIn; + out: $TOut; + } + : TParser extends ParserWithoutInput + ? { + in: $InOut; + out: $InOut; + } + : never; + +export type Simplify = TType extends any[] | Date ? 
TType : { [K in keyof TType]: TType[K] }; + +export type TriggerResult = { + id: string; +}; + +export type TaskRunResult = + | { + ok: true; + id: string; + output: TOutput; + } + | { + ok: false; + id: string; + error: unknown; + }; + +export type RunMetadata = { + run: string; +}; + +export type RunFnParams = { + /** Metadata about the task, run, attempt, queue, environment, organization, project and batch. */ + meta: RunMetadata; + + /** Context added by task middleware */ + ctx: TContext; + + payload: TPayload; +}; + +export type TaskOptions< + TOutput, + TContext extends object, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +> = { + /** An id for your task. This must be unique inside your project and not change between versions. */ + id: TIdentifier; + + schema?: TParser; + + /** This gets called when a task is triggered. It's where you put the code you want to execute. + * + * @param payload - The payload that is passed to your task when it's triggered. This must be JSON serializable. + * @param params - Metadata about the run. + */ + run: (params: Simplify, TContext>>) => Promise; +}; + +export interface Task< + TOutput, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +> { + /** + * The id of the task. + */ + id: TIdentifier; + /** + * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. + * @param payload + * @param options + * @returns TriggerResult + * - `id` - The id of the triggered task run. + */ + trigger: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise; + + /** + * Trigger a task with the given payload, and wait for the result. 
Returns the result of the task run + * @param payload + * @param options - Options for the task run + * @returns TaskRunResult + * @example + * ``` + * const result = await task.triggerAndWait({ foo: "bar" }); + * + * if (result.ok) { + * console.log(result.output); + * } else { + * console.error(result.error); + * } + * ``` + */ + triggerAndWait: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise>; +} + +export type AnyTask = Task; + +type inferParserIn = TParser extends Parser + ? inferParser["in"] + : TDefault; +type inferParserOut = TParser extends Parser + ? inferParser["out"] + : TDefault; + +export type TaskPayloadIn = TTask extends Task + ? inferParserIn + : never; + +export type TaskPayloadOut = TTask extends Task + ? inferParserOut + : never; + +export type TaskOutput = TTask extends Task + ? TOutput + : never; + +export type TaskIdentifier = TTask extends Task + ? TIdentifier + : never; + +export type TaskTypes = TTask extends Task< + infer TOutput, + infer TIdentifier, + infer TParser +> + ? 
{ + id: TIdentifier; + payloadIn: inferParserIn; + payloadOut: inferParserOut; + output: TOutput; + } + : never; + +export type TriggerTaskOptions = { + idempotencyKey?: string; + maxAttempts?: number; + startAt?: Date; + startAfter?: number; + concurrencyKey?: string; +}; + +export type Prettify = { + [K in keyof T]: T[K]; +} & {}; + +export function task< + TOutput, + TContext extends object, + TIdentifier extends string, + TParser extends Parser | undefined = undefined, +>( + options: TaskOptions +): Task { + return createTask(options); +} + +export function createTask< + TOutput, + TContext extends object, + TIndentifier extends string, + TParser extends Parser | undefined = undefined, +>( + params: TaskOptions +): Task { + const task: Task = { + id: params.id, + trigger: async (payload, options) => { + return { + id: "run_1234", + }; + }, + triggerAndWait: async (payload, options) => { + const output = await params.run({ + meta: { run: "run_1234" }, + payload: payload as unknown as inferParserOut, // Actually do the parsing + ctx: {} as TContext, + }); + + return { + ok: true, + id: "run_1234", + output, + }; + }, + }; + + return task; +} + +export interface TaskLibraryRecord { + [key: string]: AnyTask | TaskLibraryRecord; +} + +export interface TaskLibrary { + _def: { record: TRecord }; +} + +export type AnyTaskLibrary = TaskLibrary; + +export type CreateTaskLibraryOptions = { + [key: string]: AnyTask | AnyTaskLibrary | CreateTaskLibraryOptions; +}; + +export type DecorateCreateTaskLibraryOptions = + { + [K in keyof TTaskLibraryOptions]: TTaskLibraryOptions[K] extends infer $Value + ? $Value extends AnyTask + ? $Value + : $Value extends TaskLibrary + ? TRecord + : $Value extends CreateTaskLibraryOptions + ? 
DecorateCreateTaskLibraryOptions<$Value> + : never + : never; + }; + +function taskLibrary( + input: TInput +): TaskLibrary>; +function taskLibrary(input: TInput): TaskLibrary; +function taskLibrary(input: TaskLibraryRecord | CreateTaskLibraryOptions) { + // TODO: reserved words + + return { + _def: { + record: input, + }, + }; +} + +// ======== client side +type DecorateTask = { + trigger: (id: TaskIdentifier, payload: TaskPayloadIn) => Promise<{ id: string }>; +}; + +type DecoratedTaskLibraryRecord< + TTaskLibrary extends AnyTaskLibrary, + TRecord extends TaskLibraryRecord, +> = { + [TKey in keyof TRecord]: TRecord[TKey] extends infer $Value + ? $Value extends TaskLibraryRecord + ? DecoratedTaskLibraryRecord + : $Value extends AnyTask + ? DecorateTask<$Value> + : never + : never; +}; + +export type inferTaskLibraryClient = + DecoratedTaskLibraryRecord; + +export type CreateTriggerClient = { + lib: inferTaskLibraryClient; + runs: { + retrieve: (id: string) => Promise<{ status: boolean }>; + }; +}; + +export type CreateTriggerClientOptions = { + secretKey?: string; +}; + +export function createTriggerClient( + options?: CreateTriggerClientOptions +): CreateTriggerClient { + return {} as CreateTriggerClient; +} + +// trigger/my-tasks.ts +const taskOne = task({ + id: "task-1", + run: async () => { + const handle = await taskTwo.trigger({ url: "https://trigger.dev" }); + const result = await taskTwo.triggerAndWait({ url: "https://trigger.dev" }); + + return "foo-bar"; + }, +}); + +const taskTwo = task({ + id: "task-2", + async run(params) { + return { + hello: "world", + payload: params.payload.other, + }; + }, +}); + +const userTaskOne = task({ + id: "user/task-1", + run: async (params) => { + return "foo-bar"; + }, +}); + +const userTaskTwo = task({ + id: "user/task-2", + run: async (params) => { + return "foo-bar"; + }, +}); + +const zodTaskOne = task({ + id: "zod/task-1", + schema: z.object({ foo: z.string() }), + run: async (params) => {}, +}); + +const 
zodTaskTwo = task({ + id: "zod/task-2", + schema: z.object({ foo: z.string(), isAdmin: z.boolean().default(false) }), + run: async (params) => { + console.log(params.payload.foo, params.meta.run); + }, +}); + +const valibotTaskOne = task({ + id: "valibot/task-1", + schema: wrap( + v.object({ + foo: v.string(), + }) + ), + run: async (params) => { + await zodTaskOne.trigger({ foo: "bar" }); + await zodTaskTwo.trigger({ foo: "bar" }); + + await valibotTaskTwo.trigger({ foo: "bar" }); + }, +}); + +const valibotTaskTwo = task({ + id: "valibot/task-2", + schema: wrap( + v.object({ + foo: v.string(), + isAdmin: v.optional(v.boolean(), true), + }) + ), + run: async (params) => { + await valibotTaskOne.trigger({ foo: "bar" }); + }, +}); + +// in trigger/lib.ts +const myTaskLibrary = taskLibrary({ + myTasks: { taskOne, taskTwo }, +}); + +const userTaskLibrary = taskLibrary({ + userTaskOne, + userTaskTwo, +}); + +const zodTaskLibrary = taskLibrary({ + zodTaskOne, + zodTaskTwo, +}); + +const valibotTaskLibrary = taskLibrary({ + valibotTaskOne, + valibotTaskTwo, +}); + +export const library = taskLibrary({ + foo: myTaskLibrary, + bar: userTaskLibrary, + zod: zodTaskLibrary, + valibot: valibotTaskLibrary, +}); + +// Export the library type +export type Library = typeof library; + +// Now on the client +const client = createTriggerClient({ + secretKey: "tr_dev_1234", +}); + +client.runs.retrieve("run_12343"); // Call regular API client calls + +// Tasks are now available under lib +client.lib.foo.myTasks.taskOne.trigger("task-1", { hello: "world" }); +client.lib.bar.userTaskOne.trigger("user/task-1", { userId: "user_123" }); +client.lib.bar.userTaskTwo.trigger("user/task-2", { userId: "user_123", isAdmin: true }); +client.lib.bar.userTaskTwo.trigger("user/task-2", { userId: "user_123", isAdmin: false }); +client.lib.zod.zodTaskOne.trigger("zod/task-1", { foo: "bar" }); +client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar" }); 
+client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar", isAdmin: false }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { foo: "bar" }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { foo: "bar", isAdmin: true }); diff --git a/experiments/with-separate-zod-task.ts b/experiments/with-separate-zod-task.ts new file mode 100644 index 0000000000..985111346d --- /dev/null +++ b/experiments/with-separate-zod-task.ts @@ -0,0 +1,499 @@ +import { z } from "zod"; +import * as v from "valibot"; + +export type ParserZodEsque = { + _input: TInput; + _output: TParsedInput; +}; + +export type ParserMyZodEsque = { + parse: (input: any) => TInput; +}; + +export type ParserSuperstructEsque = { + create: (input: unknown) => TInput; +}; + +export type ParserCustomValidatorEsque = (input: unknown) => Promise | TInput; + +export type ParserYupEsque = { + validateSync: (input: unknown) => TInput; +}; + +export type ParserScaleEsque = { + assert(value: unknown): asserts value is TInput; +}; + +export type ParserWithoutInput = + | ParserCustomValidatorEsque + | ParserMyZodEsque + | ParserScaleEsque + | ParserSuperstructEsque + | ParserYupEsque; + +export type ParserWithInputOutput = ParserZodEsque; + +export type Parser = ParserWithInputOutput | ParserWithoutInput; + +export type inferParser = TParser extends ParserWithInputOutput< + infer $TIn, + infer $TOut +> + ? { + in: $TIn; + out: $TOut; + } + : TParser extends ParserWithoutInput + ? { + in: $InOut; + out: $InOut; + } + : never; + +export type Simplify = TType extends any[] | Date ? TType : { [K in keyof TType]: TType[K] }; + +export type TriggerResult = { + id: string; +}; + +export type TaskRunResult = + | { + ok: true; + id: string; + output: TOutput; + } + | { + ok: false; + id: string; + error: unknown; + }; + +export type RunMetadata = { + run: string; +}; + +export type RunFnParams = { + /** Metadata about the task, run, attempt, queue, environment, organization, project and batch. 
*/ + meta: RunMetadata; + + /** Context added by task middleware */ + ctx: TContext; + + payload: TPayload; +}; + +export type TaskOptions< + TPayloadIn, + TPayloadOut, + TOutput, + TContext extends object, + TIdentifier extends string, +> = { + /** An id for your task. This must be unique inside your project and not change between versions. */ + id: TIdentifier; + + schema?: Parser; + + /** This gets called when a task is triggered. It's where you put the code you want to execute. + * + * @param payload - The payload that is passed to your task when it's triggered. This must be JSON serializable. + * @param params - Metadata about the run. + */ + run: ( + params: Simplify, TContext>> + ) => Promise; +}; + +export interface Task { + /** + * The id of the task. + */ + id: TIdentifier; + /** + * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. + * @param payload + * @param options + * @returns TriggerResult + * - `id` - The id of the triggered task run. + */ + trigger: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise; + + /** + * Trigger a task with the given payload, and wait for the result. Returns the result of the task run + * @param payload + * @param options - Options for the task run + * @returns TaskRunResult + * @example + * ``` + * const result = await task.triggerAndWait({ foo: "bar" }); + * + * if (result.ok) { + * console.log(result.output); + * } else { + * console.error(result.error); + * } + * ``` + */ + triggerAndWait: ( + payload: Simplify>, + options?: TriggerTaskOptions + ) => Promise>; +} + +export type AnyTask = Task; + +type IsUnknown = unknown extends T ? (T extends unknown ? true : false) : false; +type NonUnknown = IsUnknown extends true ? never : T; + +export type inferTaskPayloadIn = NonUnknown extends never + ? 
TPayloadOut + : TPayloadIn; +export type inferTaskPayloadOut = NonUnknown extends never + ? TPayloadIn + : TPayloadOut; + +export type TaskPayloadIn = TTask extends Task< + infer TPayloadIn, + infer TPayloadOut, + any, + string +> + ? inferTaskPayloadIn + : never; + +export type TaskPayloadOut = TTask extends Task< + infer TPayloadIn, + infer TPayloadOut, + any, + string +> + ? inferTaskPayloadOut + : never; + +export type TaskOutput = TTask extends Task + ? TOutput + : never; + +export type TaskIdentifier = TTask extends Task< + any, + any, + any, + infer TIdentifier +> + ? TIdentifier + : never; + +export type TaskTypes = TTask extends Task< + infer TPayloadIn, + infer TPayloadOut, + infer TOutput, + infer TIdentifier +> + ? { + id: TIdentifier; + payloadIn: TPayloadIn; + payloadOut: TPayloadOut; + output: TOutput; + } + : never; + +export type TriggerTaskOptions = { + idempotencyKey?: string; + maxAttempts?: number; + startAt?: Date; + startAfter?: number; + concurrencyKey?: string; +}; + +export type Prettify = { + [K in keyof T]: T[K]; +} & {}; + +export function task< + TPayloadIn, + TPayloadOut, + TOutput, + TContext extends object, + TIdentifier extends string, +>( + options: TaskOptions +): Task { + return createTask(options); +} + +export function createTask< + TPayloadIn, + TPayloadOut, + TOutput, + TContext extends object, + TIndentifier extends string, +>( + params: TaskOptions +): Task { + const task: Task = { + id: params.id, + trigger: async (payload, options) => { + return { + id: "run_1234", + }; + }, + triggerAndWait: async (payload, options) => { + const output = await params.run({ + meta: { run: "run_1234" }, + payload: payload as unknown as inferTaskPayloadOut, + ctx: {} as TContext, + }); + + return { + ok: true, + id: "run_1234", + output, + }; + }, + }; + + return task; +} + +export type ZodTaskOptions< + TOutput, + TContext extends object, + TIdentifier extends string, + TSchema extends z.ZodTypeAny = z.ZodTypeAny, +> = { + schema: 
TSchema; +} & TaskOptions, z.output, TOutput, TContext, TIdentifier>; + +export function zodTask< + TOutput, + TContext extends object, + TIdentifier extends string, + TSchema extends z.ZodTypeAny = z.ZodTypeAny, +>( + options: ZodTaskOptions +): Task, z.output, TOutput, TIdentifier> { + return createTask, z.output, TOutput, TContext, TIdentifier>(options); +} + +export type ValibotTaskOptions< + TOutput, + TContext extends object, + TIdentifier extends string, + TSchema extends v.BaseSchema = v.AnySchema, +> = { + schema: TSchema; +} & TaskOptions, v.Output, TOutput, TContext, TIdentifier>; + +export function valibotTask< + TOutput, + TContext extends object, + TIdentifier extends string, + TSchema extends v.BaseSchema = v.AnySchema, +>( + options: ValibotTaskOptions +): Task, v.Output, TOutput, TIdentifier> { + return createTask, v.Output, TOutput, TContext, TIdentifier>(options); +} + +export interface TaskLibraryRecord { + [key: string]: AnyTask | TaskLibraryRecord; +} + +export interface TaskLibrary { + _def: { record: TRecord }; +} + +export type AnyTaskLibrary = TaskLibrary; + +export type CreateTaskLibraryOptions = { + [key: string]: AnyTask | AnyTaskLibrary | CreateTaskLibraryOptions; +}; + +export type DecorateCreateTaskLibraryOptions = + { + [K in keyof TTaskLibraryOptions]: TTaskLibraryOptions[K] extends infer $Value + ? $Value extends AnyTask + ? $Value + : $Value extends TaskLibrary + ? TRecord + : $Value extends CreateTaskLibraryOptions + ? 
DecorateCreateTaskLibraryOptions<$Value> + : never + : never; + }; + +function taskLibrary( + input: TInput +): TaskLibrary>; +function taskLibrary(input: TInput): TaskLibrary; +function taskLibrary(input: TaskLibraryRecord | CreateTaskLibraryOptions) { + // TODO: reserved words + + return { + _def: { + record: input, + }, + }; +} + +// ======== client side +type DecorateTask = { + trigger: (id: TaskIdentifier, payload: TaskPayloadIn) => Promise<{ id: string }>; +}; + +type DecoratedTaskLibraryRecord< + TTaskLibrary extends AnyTaskLibrary, + TRecord extends TaskLibraryRecord, +> = { + [TKey in keyof TRecord]: TRecord[TKey] extends infer $Value + ? $Value extends TaskLibraryRecord + ? DecoratedTaskLibraryRecord + : $Value extends AnyTask + ? DecorateTask<$Value> + : never + : never; +}; + +export type inferTaskLibraryClient = + DecoratedTaskLibraryRecord; + +export type CreateTriggerClient = { + lib: inferTaskLibraryClient; + runs: { + retrieve: (id: string) => Promise<{ status: boolean }>; + }; +}; + +export type CreateTriggerClientOptions = { + secretKey?: string; +}; + +export function createTriggerClient( + options?: CreateTriggerClientOptions +): CreateTriggerClient { + return {} as CreateTriggerClient; +} + +// trigger/my-tasks.ts +const taskOne = task({ + id: "task-1", + run: async () => { + const handle = await taskTwo.trigger({ url: "https://trigger.dev" }); + const result = await taskTwo.triggerAndWait({ url: "https://trigger.dev" }); + + return "foo-bar"; + }, +}); + +const taskTwo = task({ + id: "task-2", + async run(params) { + return { + hello: "world", + payload: params.payload, + }; + }, +}); + +const userTaskOne = task({ + id: "user/task-1", + run: async (params: { payload: { userId: string } }) => { + return "foo-bar"; + }, +}); + +const userTaskTwo = task({ + id: "user/task-2", + run: async (params: { payload: { userId: string; isAdmin: boolean } }) => { + return "foo-bar"; + }, +}); + +const zodTaskOne = task({ + id: "zod/task-1", + schema: 
z.object({ foo: z.string() }), + run: async (params) => {}, +}); + +const zodTaskTwo = task({ + id: "zod/task-2", + schema: z.object({ foo: z.string(), isAdmin: z.boolean().default(false) }), + run: async (params) => { + console.log(params.payload.foo, params.meta.run); + }, +}); + +const valibotTaskOne = task({ + id: "valibot/task-1", + schema: v.object({ + foo: v.string(), + }), + run: async (params) => { + await zodTaskOne.trigger({ foo: "bar" }); + await zodTaskTwo.trigger({ foo: "bar" }); + + await valibotTaskTwo.trigger({ foo: "bar" }); + }, +}); + +const valibotTaskTwo = task({ + id: "valibot/task-2", + schema: v.object({ + foo: v.string(), + isAdmin: v.optional(v.boolean(), true), + }), + run: async (params) => { + await valibotTaskOne.trigger({ foo: "bar" }); + }, +}); + +// in trigger/lib.ts +const myTaskLibrary = taskLibrary({ + myTasks: { taskOne, taskTwo }, +}); + +const userTaskLibrary = taskLibrary({ + userTaskOne, + userTaskTwo, +}); + +const zodTaskLibrary = taskLibrary({ + zodTaskOne, + zodTaskTwo, +}); + +const valibotTaskLibrary = taskLibrary({ + valibotTaskOne, + valibotTaskTwo, +}); + +export const library = taskLibrary({ + foo: myTaskLibrary, + bar: userTaskLibrary, + zod: zodTaskLibrary, + valibot: valibotTaskLibrary, +}); + +// Export the library type +export type Library = typeof library; + +// Now on the client +const client = createTriggerClient({ + secretKey: "tr_dev_1234", +}); + +client.runs.retrieve("run_12343"); // Call regular API client calls + +// Tasks are now available under lib +client.lib.foo.myTasks.taskOne.trigger("task-1", { hello: "world" }); +client.lib.bar.userTaskOne.trigger("user/task-1", { userId: "user_123" }); +client.lib.bar.userTaskTwo.trigger("user/task-2", { userId: "user_123", isAdmin: true }); +client.lib.bar.userTaskTwo.trigger("user/task-2", { userId: "user_123", isAdmin: false }); +client.lib.zod.zodTaskOne.trigger("zod/task-1", { foo: "bar" }); +client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar" 
}); +client.lib.zod.zodTaskTwo.trigger("zod/task-2", { foo: "bar", isAdmin: false }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { foo: "bar" }); +client.lib.valibot.valibotTaskTwo.trigger("valibot/task-2", { foo: "bar", isAdmin: false }); diff --git a/integrations/airtable/CHANGELOG.md b/integrations/airtable/CHANGELOG.md index 0664db0a8d..466ac91882 100644 --- a/integrations/airtable/CHANGELOG.md +++ b/integrations/airtable/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/airtable +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/airtable/package.json b/integrations/airtable/package.json index d08437bb81..9ec82eb2be 100644 --- a/integrations/airtable/package.json +++ b/integrations/airtable/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/airtable", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for airtable", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -25,8 +25,8 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "airtable": "^0.12.1", "zod": "3.22.3" }, diff --git a/integrations/github/CHANGELOG.md b/integrations/github/CHANGELOG.md index 
a042a8520d..eb4354c24f 100644 --- a/integrations/github/CHANGELOG.md +++ b/integrations/github/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/github +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/github/package.json b/integrations/github/package.json index 0ed5fb486a..f2b38569d1 100644 --- a/integrations/github/package.json +++ b/integrations/github/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/github", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The official GitHub integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -30,8 +30,8 @@ "@octokit/request-error": "^5.0.1", "@octokit/webhooks": "^12.0.10", "octokit": "^3.1.2", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "zod": "3.22.3" }, "engines": { diff --git a/integrations/linear/CHANGELOG.md b/integrations/linear/CHANGELOG.md index 373cb2aa9b..db5c488e64 100644 --- a/integrations/linear/CHANGELOG.md +++ b/integrations/linear/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/linear +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- 
@trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/linear/package.json b/integrations/linear/package.json index ea9703c26d..d4a8d04b92 100644 --- a/integrations/linear/package.json +++ b/integrations/linear/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/linear", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for @linear/sdk", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -26,8 +26,8 @@ }, "dependencies": { "@linear/sdk": "^8.0.0", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "zod": "3.22.3" }, "engines": { diff --git a/integrations/openai/CHANGELOG.md b/integrations/openai/CHANGELOG.md index b1abb6d3a8..c2c7b97a7e 100644 --- a/integrations/openai/CHANGELOG.md +++ b/integrations/openai/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/slack +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- 
@trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/openai/package.json b/integrations/openai/package.json index 1b994ab1c7..ce448da0f8 100644 --- a/integrations/openai/package.json +++ b/integrations/openai/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/openai", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The official OpenAI integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -42,8 +42,8 @@ }, "dependencies": { "openai": "^4.16.1", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28" + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33" }, "engines": { "node": ">=18.0.0" diff --git a/integrations/plain/CHANGELOG.md b/integrations/plain/CHANGELOG.md index 1c85b8c230..9194e83d1f 100644 --- a/integrations/plain/CHANGELOG.md +++ b/integrations/plain/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/plain +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/plain/package.json b/integrations/plain/package.json index d369f1b5f2..c3c51b5bbd 100644 --- a/integrations/plain/package.json +++ b/integrations/plain/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/plain", - "version": 
"3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The official Plain.com integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -24,8 +24,8 @@ "build:tsup": "tsup" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "@team-plain/typescript-sdk": "^2.7.0" }, "engines": { diff --git a/integrations/replicate/CHANGELOG.md b/integrations/replicate/CHANGELOG.md index 9b59ec88b8..7c641a12f8 100644 --- a/integrations/replicate/CHANGELOG.md +++ b/integrations/replicate/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/replicate +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/replicate/package.json b/integrations/replicate/package.json index d3d99c4132..68ba6e34f9 100644 --- a/integrations/replicate/package.json +++ b/integrations/replicate/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/replicate", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for replicate", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -25,8 +25,8 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": 
"workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "replicate": "^0.18.1", "zod": "3.22.3" }, diff --git a/integrations/resend/CHANGELOG.md b/integrations/resend/CHANGELOG.md index c60aa79342..ef2e919f7b 100644 --- a/integrations/resend/CHANGELOG.md +++ b/integrations/resend/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/resend +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/resend/package.json b/integrations/resend/package.json index 52565b96b2..5e1d0fe705 100644 --- a/integrations/resend/package.json +++ b/integrations/resend/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/resend", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The official Resend.com integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -24,8 +24,8 @@ "build:tsup": "tsup" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "resend": "^2.1.0" }, "engines": { diff --git a/integrations/sendgrid/CHANGELOG.md b/integrations/sendgrid/CHANGELOG.md index 077668babb..2170ce650f 100644 --- a/integrations/sendgrid/CHANGELOG.md +++ 
b/integrations/sendgrid/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/sendgrid +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/sendgrid/package.json b/integrations/sendgrid/package.json index fe275b1468..d8cdb02ddd 100644 --- a/integrations/sendgrid/package.json +++ b/integrations/sendgrid/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/sendgrid", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for @sendgrid/mail", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -26,8 +26,8 @@ }, "dependencies": { "@sendgrid/mail": "^7.7.0", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28" + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33" }, "engines": { "node": ">=16.8.0" diff --git a/integrations/shopify/CHANGELOG.md b/integrations/shopify/CHANGELOG.md index 173934bdf5..cf04bfd9ee 100644 --- a/integrations/shopify/CHANGELOG.md +++ b/integrations/shopify/CHANGELOG.md @@ -1,5 +1,41 @@ # @trigger.dev/shopify +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch 
Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- 1fd26ff98: improved error messages when a shopify webhook fails to register + - @trigger.dev/integration-kit@3.0.0-beta.29 + - @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/shopify/package.json b/integrations/shopify/package.json index 4f3d602a07..b857d4e094 100644 --- a/integrations/shopify/package.json +++ b/integrations/shopify/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/shopify", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for @shopify/shopify-api", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -26,8 +26,8 @@ }, "dependencies": { "@shopify/shopify-api": "^8.0.2", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", "zod": "3.22.3" }, "engines": { diff --git a/integrations/shopify/src/index.ts b/integrations/shopify/src/index.ts index 2bc03a14c4..882f866e60 100644 --- a/integrations/shopify/src/index.ts +++ b/integrations/shopify/src/index.ts @@ -63,8 +63,30 @@ export class Shopify implements TriggerIntegration { throw `Can't create Shopify integration (${options.id}) as apiKey was undefined`; } + if (Object.keys(options).includes("apiSecretKey") && !options.apiSecretKey) { + throw `Can't create Shopify integration (${options.id}) as apiSecretKey was undefined`; + } + + if (Object.keys(options).includes("adminAccessToken") && !options.adminAccessToken) { + throw `Can't create Shopify integration (${options.id}) as adminAccessToken was undefined`; + } + + if (Object.keys(options).includes("hostName") && 
!options.hostName) { + throw `Can't create Shopify integration (${options.id}) as hostName was undefined`; + } + this._options = options; - this._shopDomain = this._options.hostName.replace("http://", "").replace("https://", ""); + // Extract the shop domain if user has entered the full URL + this._shopDomain = this._options.hostName + .replace(/^https?:\/\//, "") // Remove http:// or https:// + .replace(/\/$/, ""); // Remove trailing slash if it exists (e.g. `example.myshopify.com/`) + + // Regular expression to ensure the shopDomain is a valid `.myshopify.com` domain + const shopifyDomainPattern = /^[a-zA-Z0-9-]+\.myshopify\.com$/; + + if (!shopifyDomainPattern.test(this._shopDomain)) { + throw `Can't create Shopify integration (${options.id}) because hostName should be a valid ".myshopify.com" domain, not a custom primary domain. For example: my-domain.myshopify.com`; + } } get authSource() { diff --git a/integrations/shopify/src/webhooks.ts b/integrations/shopify/src/webhooks.ts index c39bcd8fa7..8bcc8b0dec 100644 --- a/integrations/shopify/src/webhooks.ts +++ b/integrations/shopify/src/webhooks.ts @@ -86,21 +86,36 @@ export function createWebhookEventSource(integration: Shopify) { key: (params) => params.topic, crud: { create: async ({ io, ctx }) => { - const webhook = await io.integration.rest.Webhook.save("create-webhook", { - fromData: { - address: ctx.url, - topic: ctx.params.topic, - // fields: ctx.params.fields, - }, - }); - - const clientSecret = await io.integration.runTask( - "get-client-secret", - async (client) => client.config.apiSecretKey - ); - - await io.store.job.set("set-id", "webhook-id", webhook.id); - await io.store.job.set("set-secret", "webhook-secret", clientSecret); + try { + const webhook = await io.integration.rest.Webhook.save("create-webhook", { + fromData: { + address: ctx.url, + topic: ctx.params.topic, + // fields: ctx.params.fields, + }, + }); + + if (!webhook.id) { + throw new Error( + "Failed to create webhook. 
Ensure your Shopfiy client configuration is correct. Have you set the correct access scopes? Are you using the primary myshopify.com domain?" + ); + } + + const clientSecret = await io.integration.runTask( + "get-client-secret", + async (client) => client.config.apiSecretKey + ); + + await io.store.job.set("set-id", "webhook-id", webhook.id); + await io.store.job.set("set-secret", "webhook-secret", clientSecret); + } catch (error) { + if (error instanceof Error) { + await io.logger.error(`Failed to create webhook: ${error.message}`); + } else { + await io.logger.error("Failed to create webhook", { rawError: error }); + } + throw error; + } }, delete: async ({ io, ctx }) => { const webhookId = await io.store.job.get("get-webhook-id", "webhook-id"); @@ -109,23 +124,41 @@ export function createWebhookEventSource(integration: Shopify) { throw new Error("Missing webhook ID for delete operation."); } - await io.integration.rest.Webhook.delete("delete-webhook", { - id: webhookId, - }); + try { + await io.integration.rest.Webhook.delete("delete-webhook", { + id: webhookId, + }); + } catch (error) { + if (error instanceof Error) { + await io.logger.error(`Failed to delete webhook: ${error.message}`); + } else { + await io.logger.error("Failed to delete webhook", { rawError: error }); + } + throw error; + } await io.store.job.delete("delete-webhook-id", "webhook-id"); }, update: async ({ io, ctx }) => { const webhookId = await io.store.job.get("get-webhook-id", "webhook-id"); - await io.integration.rest.Webhook.save("update-webhook", { - fromData: { - id: webhookId, - address: ctx.url, - topic: ctx.params.topic, - // fields: ctx.params.fields, - }, - }); + try { + await io.integration.rest.Webhook.save("update-webhook", { + fromData: { + id: webhookId, + address: ctx.url, + topic: ctx.params.topic, + // fields: ctx.params.fields, + }, + }); + } catch (error) { + if (error instanceof Error) { + await io.logger.error(`Failed to update webhook: ${error.message}`); + } else { + 
await io.logger.error("Failed to update webhook", { rawError: error }); + } + throw error; + } }, }, verify: async ({ request, client, ctx }) => { diff --git a/integrations/slack/CHANGELOG.md b/integrations/slack/CHANGELOG.md index 689b570e8a..c7f844eed3 100644 --- a/integrations/slack/CHANGELOG.md +++ b/integrations/slack/CHANGELOG.md @@ -1,5 +1,35 @@ # @trigger.dev/slack +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/slack/package.json b/integrations/slack/package.json index 63e1e20d42..395c4e46db 100644 --- a/integrations/slack/package.json +++ b/integrations/slack/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/slack", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The official Slack integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -25,7 +25,7 @@ }, "dependencies": { "@slack/web-api": "^6.8.1", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "zod": "3.22.3" }, "engines": { diff --git a/integrations/stripe/CHANGELOG.md b/integrations/stripe/CHANGELOG.md index 0b42c1574d..d28826d5b3 100644 --- a/integrations/stripe/CHANGELOG.md +++ b/integrations/stripe/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/stripe +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- 
@trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/stripe/package.json b/integrations/stripe/package.json index 9deff6025a..2031bc5d8b 100644 --- a/integrations/stripe/package.json +++ b/integrations/stripe/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/stripe", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for stripe", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -25,8 +25,8 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "stripe": "^12.14.0", "zod": "3.22.3" }, diff --git a/integrations/supabase/CHANGELOG.md b/integrations/supabase/CHANGELOG.md index 69c829e4dc..418cff6ac3 100644 --- a/integrations/supabase/CHANGELOG.md +++ b/integrations/supabase/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/supabase +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git 
a/integrations/supabase/package.json b/integrations/supabase/package.json index abcd445200..0d7ca33a5d 100644 --- a/integrations/supabase/package.json +++ b/integrations/supabase/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/supabase", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Trigger.dev integration for @supabase/supabase-js", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -26,8 +26,8 @@ }, "dependencies": { "@supabase/supabase-js": "^2.26.0", - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "supabase-management-js": "^1.0.0", "zod": "3.22.3" }, diff --git a/integrations/typeform/CHANGELOG.md b/integrations/typeform/CHANGELOG.md index 6bb3651a0e..dcdbb0da90 100644 --- a/integrations/typeform/CHANGELOG.md +++ b/integrations/typeform/CHANGELOG.md @@ -1,5 +1,40 @@ # @trigger.dev/typeform +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.33 +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.32 +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.31 +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.30 +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/integration-kit@3.0.0-beta.29 +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/integrations/typeform/package.json b/integrations/typeform/package.json index 95958ed415..c247f1cbab 100644 --- a/integrations/typeform/package.json +++ b/integrations/typeform/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/typeform", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", 
"description": "The official Typeform integration for Trigger.dev", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -24,8 +24,8 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.28", - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28", + "@trigger.dev/integration-kit": "workspace:^3.0.0-beta.33", + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33", "@typeform/api-client": "^1.8.0", "zod": "3.22.3" }, diff --git a/packages/astro/CHANGELOG.md b/packages/astro/CHANGELOG.md index 2108db0d69..f17bcc8af3 100644 --- a/packages/astro/CHANGELOG.md +++ b/packages/astro/CHANGELOG.md @@ -1,5 +1,35 @@ # @trigger.dev/astro +## 3.0.0-beta.33 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/sdk@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/packages/astro/package.json b/packages/astro/package.json index 64a9532238..bcd5338623 100644 --- a/packages/astro/package.json +++ b/packages/astro/package.json @@ -1,7 +1,7 @@ { "name": "@trigger.dev/astro", "description": "An Astro-native integration for Trigger.dev background jobs platform", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "main": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ @@ -20,7 +20,7 @@ "build:tsup": "tsup" }, "peerDependencies": { - "@trigger.dev/sdk": "workspace:^3.0.0-beta.28" + "@trigger.dev/sdk": "workspace:^3.0.0-beta.33" }, "devDependencies": { "astro": "^3.0.12", diff --git a/packages/cli-v3/CHANGELOG.md b/packages/cli-v3/CHANGELOG.md index 751047403d..4eaa4f5a93 100644 --- a/packages/cli-v3/CHANGELOG.md +++ b/packages/cli-v3/CHANGELOG.md @@ -1,5 +1,42 @@ # trigger.dev +## 3.0.0-beta.33 + +### Patch Changes + +- 
598906fc4: Fix for typo in v3 CLI login command +- Updated dependencies [6a379e4e9] + - @trigger.dev/core@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- f96f1e91a: Better handle issues with resolving dependency versions during deploy + - @trigger.dev/core@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- b8477ea2b: Fixes an issue with scoped packages in additionalPackages option + - @trigger.dev/core@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- 0e919f56f: Better handle uncaught exceptions +- Updated dependencies [1477a2e30] +- Updated dependencies [0e919f56f] + - @trigger.dev/core@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/core@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/packages/cli-v3/package.json b/packages/cli-v3/package.json index a722e070bf..183eefbb03 100644 --- a/packages/cli-v3/package.json +++ b/packages/cli-v3/package.json @@ -1,6 +1,6 @@ { "name": "trigger.dev", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "A Command-Line Interface for Trigger.dev (v3) projects", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -86,7 +86,7 @@ "@opentelemetry/sdk-trace-base": "^1.22.0", "@opentelemetry/sdk-trace-node": "^1.22.0", "@opentelemetry/semantic-conventions": "^1.22.0", - "@trigger.dev/core": "workspace:3.0.0-beta.28", + "@trigger.dev/core": "workspace:3.0.0-beta.33", "@types/degit": "^2.8.3", "chalk": "^5.2.0", "chokidar": "^3.5.3", @@ -96,7 +96,7 @@ "dotenv": "^16.4.4", "esbuild": "^0.19.11", "evt": "^2.4.13", - "execa": "^8.0.0", + "execa": "^9.1.0", "find-up": "^7.0.0", "glob": "^10.3.10", "gradient-string": "^2.0.2", diff --git a/packages/cli-v3/src/commands/deploy.ts b/packages/cli-v3/src/commands/deploy.ts index fa1408b90f..262f2bf1d2 100644 --- a/packages/cli-v3/src/commands/deploy.ts +++ b/packages/cli-v3/src/commands/deploy.ts @@ -79,6 +79,7 @@ const DeployCommandOptions = CommonCommandOptions.extend({ apiUrl: 
z.string().optional(), saveLogs: z.boolean().default(false), skipUpdateCheck: z.boolean().default(false), + noCache: z.boolean().default(false), }); type DeployCommandOptions = z.infer; @@ -108,6 +109,12 @@ export function configureDeployCommand(program: Command) { "Build and load the image using your local Docker. Use the --registry option to specify the registry to push the image to when using --self-hosted, or just use --push-image to push to the default registry." ).hideHelp() ) + .addOption( + new CommandOption( + "--no-cache", + "Do not use the cache when building the image. This will slow down the build process but can be useful if you are experiencing issues with the cache." + ).hideHelp() + ) .addOption( new CommandOption( "--push", @@ -291,6 +298,7 @@ async function _deployCommand(dir: string, options: DeployCommandOptions) { buildPlatform: options.buildPlatform, pushImage: options.push, selfHostedRegistry: !!options.registry, + noCache: options.noCache, }); } @@ -316,6 +324,7 @@ async function _deployCommand(dir: string, options: DeployCommandOptions) { projectRef: resolvedConfig.config.project, loadImage: options.loadImage, buildPlatform: options.buildPlatform, + noCache: options.noCache, }, deploymentSpinner ); @@ -389,6 +398,7 @@ async function _deployCommand(dir: string, options: DeployCommandOptions) { deploymentResponse.data.id, { imageReference, + selfHosted: options.selfHosted, } ); @@ -752,6 +762,7 @@ type BuildAndPushImageOptions = { projectRef: string; loadImage: boolean; buildPlatform: string; + noCache: boolean; }; type BuildAndPushImageResults = @@ -795,6 +806,7 @@ async function buildAndPushImage( "build", "-f", "Containerfile", + options.noCache ? "--no-cache" : undefined, "--platform", options.buildPlatform, "--provenance", @@ -920,6 +932,7 @@ async function buildAndPushSelfHostedImage( "build", "-f", "Containerfile", + options.noCache ? 
"--no-cache" : undefined, "--platform", options.buildPlatform, "--build-arg", @@ -937,7 +950,9 @@ async function buildAndPushSelfHostedImage( ".", // The build context ].filter(Boolean) as string[]; - logger.debug(`docker ${buildArgs.join(" ")}`); + logger.debug(`docker ${buildArgs.join(" ")}`, { + cwd: options.cwd, + }); span.setAttribute("docker.command.build", `docker ${buildArgs.join(" ")}`); @@ -1102,7 +1117,9 @@ async function compileProject( .replace("__TASKS__", createTaskFileImports(taskFiles)) .replace( "__WORKER_SETUP__", - `import { tracingSDK } from "${escapeImportPath(workerSetupPath)}";` + `import { tracingSDK, otelTracer, otelLogger } from "${escapeImportPath( + workerSetupPath + )}";` ); if (configPath) { diff --git a/packages/cli-v3/src/commands/dev.tsx b/packages/cli-v3/src/commands/dev.tsx index 569b9e9e9c..8704021209 100644 --- a/packages/cli-v3/src/commands/dev.tsx +++ b/packages/cli-v3/src/commands/dev.tsx @@ -364,7 +364,9 @@ function useDev({ .replace("__TASKS__", createTaskFileImports(taskFiles)) .replace( "__WORKER_SETUP__", - `import { tracingSDK, sender } from "${escapeImportPath(workerSetupPath)}";` + `import { tracingSDK, otelTracer, otelLogger, sender } from "${escapeImportPath( + workerSetupPath + )}";` ); if (configPath) { diff --git a/packages/cli-v3/src/commands/init.ts b/packages/cli-v3/src/commands/init.ts index 675728a8e5..3611b0486c 100644 --- a/packages/cli-v3/src/commands/init.ts +++ b/packages/cli-v3/src/commands/init.ts @@ -5,7 +5,7 @@ import { recordSpanException } from "@trigger.dev/core/v3/workers"; import chalk from "chalk"; import { Command } from "commander"; import { execa } from "execa"; -import { applyEdits, modify } from "jsonc-parser"; +import { applyEdits, modify, findNodeAtLocation, parseTree, getNodeValue } from "jsonc-parser"; import { writeFile } from "node:fs/promises"; import { join, relative, resolve } from "node:path"; import terminalLink from "terminal-link"; @@ -329,8 +329,29 @@ async function 
addConfigFileToTsConfig(dir: string, options: InitCommandOptions) }); const tsconfigContent = await readFile(tsconfigPath); + const tsconfigContentTree = parseTree(tsconfigContent, undefined); + if (!tsconfigContentTree) { + span.end(); + + return; + } + + const tsconfigIncludeOption = findNodeAtLocation(tsconfigContentTree, ["include"]); + if (!tsconfigIncludeOption) { + span.end(); + + return; + } + + const tsConfigFileName = "trigger.config.ts"; + const tsconfigIncludeOptionValue: string[] = getNodeValue(tsconfigIncludeOption); + if (tsconfigIncludeOptionValue.includes(tsConfigFileName)) { + span.end(); + + return; + } - const edits = modify(tsconfigContent, ["include", -1], "trigger.config.ts", { + const edits = modify(tsconfigContent, ["include", -1], tsConfigFileName, { isArrayInsertion: true, formattingOptions: { tabSize: 2, diff --git a/packages/cli-v3/src/commands/login.ts b/packages/cli-v3/src/commands/login.ts index 96279cc5a4..7efb3c99a4 100644 --- a/packages/cli-v3/src/commands/login.ts +++ b/packages/cli-v3/src/commands/login.ts @@ -326,7 +326,7 @@ async function createAuthorizationCode(apiClient: CliApiClient) { try { //generate authorization code const createAuthCodeSpinner = spinner(); - createAuthCodeSpinner.start("Creating authorition code"); + createAuthCodeSpinner.start("Creating authorization code"); const authorizationCodeResult = await apiClient.createAuthorizationCode(); if (!authorizationCodeResult.success) { diff --git a/packages/cli-v3/src/utilities/installPackages.ts b/packages/cli-v3/src/utilities/installPackages.ts index 157d1110c2..4dd1e933e3 100644 --- a/packages/cli-v3/src/utilities/installPackages.ts +++ b/packages/cli-v3/src/utilities/installPackages.ts @@ -55,17 +55,34 @@ export function stripWorkspaceFromVersion(version: string) { } export function parsePackageName(packageSpecifier: string): { name: string; version?: string } { - const parts = packageSpecifier.split("@"); + let name: string | undefined; + let version: string | 
undefined; - if (parts.length === 1 && typeof parts[0] === "string") { - return { name: parts[0] }; + // Check if the package is scoped + if (packageSpecifier.startsWith("@")) { + const atIndex = packageSpecifier.indexOf("@", 1); + // If a version is included + if (atIndex !== -1) { + name = packageSpecifier.slice(0, atIndex); + version = packageSpecifier.slice(atIndex + 1); + } else { + name = packageSpecifier; + } + } else { + const [packageName, packageVersion] = packageSpecifier.split("@"); + + if (typeof packageName === "string") { + name = packageName; + } + + version = packageVersion; } - if (parts.length === 2 && typeof parts[0] === "string" && typeof parts[1] === "string") { - return { name: parts[0], version: parts[1] }; + if (!name) { + return { name: packageSpecifier }; } - return { name: packageSpecifier }; + return { name, version }; } async function setPackageJsonDeps(path: string, deps: Record) { diff --git a/packages/cli-v3/src/utilities/javascriptProject.ts b/packages/cli-v3/src/utilities/javascriptProject.ts index c6e983d40e..613f3bde72 100644 --- a/packages/cli-v3/src/utilities/javascriptProject.ts +++ b/packages/cli-v3/src/utilities/javascriptProject.ts @@ -1,4 +1,4 @@ -import { $ } from "execa"; +import { $, ExecaError } from "execa"; import { join } from "node:path"; import { readJSONFileSync } from "./fileSystem"; import { logger } from "./logger"; @@ -269,8 +269,7 @@ class PNPMCommands implements PackageManagerCommands { packageNames: string[], options: PackageManagerOptions ): Promise> { - const { stdout } = await $({ cwd: options.cwd })`${this.cmd} list ${packageNames} -r --json`; - const result = JSON.parse(stdout) as PnpmList; + const result = await this.#listDependencies(packageNames, options); logger.debug(`Resolving ${packageNames.join(" ")} version using ${this.name}`); @@ -289,6 +288,21 @@ class PNPMCommands implements PackageManagerCommands { return results; } + + async #listDependencies(packageNames: string[], options: 
PackageManagerOptions) { + const childProcess = await $({ + cwd: options.cwd, + reject: false, + })`${this.cmd} list ${packageNames} -r --json`; + + if (childProcess.failed) { + logger.debug("Failed to list dependencies, using stdout anyway...", { + error: childProcess.stderr, + }); + } + + return JSON.parse(childProcess.stdout) as PnpmList; + } } type NpmDependency = { @@ -331,8 +345,7 @@ class NPMCommands implements PackageManagerCommands { packageNames: string[], options: PackageManagerOptions ): Promise> { - const { stdout } = await $({ cwd: options.cwd })`${this.cmd} list ${packageNames} --json`; - const output = JSON.parse(stdout) as NpmListOutput; + const output = await this.#listDependencies(packageNames, options); logger.debug(`Resolving ${packageNames.join(" ")} version using ${this.name}`, { output }); @@ -349,6 +362,21 @@ class NPMCommands implements PackageManagerCommands { return results; } + async #listDependencies(packageNames: string[], options: PackageManagerOptions) { + const childProcess = await $({ + cwd: options.cwd, + reject: false, + })`${this.cmd} list ${packageNames} --json`; + + if (childProcess.failed) { + logger.debug("Failed to list dependencies, using stdout anyway...", { + error: childProcess.stderr, + }); + } + + return JSON.parse(childProcess.stdout) as NpmListOutput; + } + #recursivelySearchDependencies( dependencies: Record, packageName: string @@ -404,7 +432,7 @@ class YarnCommands implements PackageManagerCommands { packageNames: string[], options: PackageManagerOptions ): Promise> { - const { stdout } = await $({ cwd: options.cwd })`${this.cmd} info ${packageNames} --json`; + const stdout = await this.#listDependencies(packageNames, options); const lines = stdout.split("\n"); @@ -425,6 +453,21 @@ class YarnCommands implements PackageManagerCommands { return results; } + async #listDependencies(packageNames: string[], options: PackageManagerOptions) { + const childProcess = await $({ + cwd: options.cwd, + reject: false, + 
})`${this.cmd} info ${packageNames} --json`; + + if (childProcess.failed) { + logger.debug("Failed to list dependencies, using stdout anyway...", { + error: childProcess.stderr, + }); + } + + return childProcess.stdout; + } + // The "value" when doing yarn info is formatted like this: // "package-name@npm:version" or "package-name@workspace:version" // This function will parse the value into just the package name. diff --git a/packages/cli-v3/src/workers/dev/backgroundWorker.ts b/packages/cli-v3/src/workers/dev/backgroundWorker.ts index 0b2b2d715e..a345fb0865 100644 --- a/packages/cli-v3/src/workers/dev/backgroundWorker.ts +++ b/packages/cli-v3/src/workers/dev/backgroundWorker.ts @@ -323,6 +323,7 @@ export class BackgroundWorker { const fullEnv = { ...this.params.env, ...this.#readEnvVars(), + ...(this.params.debugOtel ? { OTEL_LOG_LEVEL: "debug" } : {}), }; logger.debug("Initializing worker", { path: this.path, cwd, fullEnv }); diff --git a/packages/cli-v3/src/workers/dev/worker-facade.ts b/packages/cli-v3/src/workers/dev/worker-facade.ts index 14358bddbb..efc65c93dd 100644 --- a/packages/cli-v3/src/workers/dev/worker-facade.ts +++ b/packages/cli-v3/src/workers/dev/worker-facade.ts @@ -26,9 +26,8 @@ declare const handleError: HandleErrorFunction | undefined; declare const __PROJECT_CONFIG__: Config; declare const tracingSDK: TracingSDK; - -const otelTracer = tracingSDK.getTracer("trigger-dev-worker", packageJson.version); -const otelLogger = tracingSDK.getLogger("trigger-dev-worker", packageJson.version); +declare const otelTracer: Tracer; +declare const otelLogger: Logger; import { TaskRunErrorCodes, @@ -45,7 +44,8 @@ import { ZodMessageSender, ZodSchemaParsedError, } from "@trigger.dev/core/v3/zodMessageHandler"; -import * as packageJson from "../../../package.json"; +import type { Tracer } from "@opentelemetry/api"; +import type { Logger } from "@opentelemetry/api-logs"; declare const sender: ZodMessageSender; diff --git 
a/packages/cli-v3/src/workers/dev/worker-setup.ts b/packages/cli-v3/src/workers/dev/worker-setup.ts index a4364d1eb8..f7804ffaa8 100644 --- a/packages/cli-v3/src/workers/dev/worker-setup.ts +++ b/packages/cli-v3/src/workers/dev/worker-setup.ts @@ -1,17 +1,14 @@ -import "source-map-support/register.js"; -import { Resource } from "@opentelemetry/resources"; -import { - ProjectConfig, - SemanticInternalAttributes, - childToWorkerMessages, - taskCatalog, -} from "@trigger.dev/core/v3"; +import type { Tracer } from "@opentelemetry/api"; +import type { Logger } from "@opentelemetry/api-logs"; +import { ProjectConfig, childToWorkerMessages, taskCatalog } from "@trigger.dev/core/v3"; import { + StandardTaskCatalog, TracingDiagnosticLogLevel, TracingSDK, - StandardTaskCatalog, } from "@trigger.dev/core/v3/workers"; import { ZodMessageSender } from "@trigger.dev/core/v3/zodMessageHandler"; +import "source-map-support/register.js"; +import * as packageJson from "../../../package.json"; __SETUP_IMPORTED_PROJECT_CONFIG__; declare const __SETUP_IMPORTED_PROJECT_CONFIG__: unknown; @@ -19,13 +16,13 @@ declare const setupImportedConfig: ProjectConfig | undefined; export const tracingSDK = new TracingSDK({ url: process.env.OTEL_EXPORTER_OTLP_ENDPOINT ?? "http://0.0.0.0:4318", - resource: new Resource({ - [SemanticInternalAttributes.CLI_VERSION]: "3.0.0", - }), instrumentations: setupImportedConfig?.instrumentations ?? [], diagLogLevel: (process.env.OTEL_LOG_LEVEL as TracingDiagnosticLogLevel) ?? 
"none", }); +export const otelTracer: Tracer = tracingSDK.getTracer("trigger-dev-worker", packageJson.version); +export const otelLogger: Logger = tracingSDK.getLogger("trigger-dev-worker", packageJson.version); + export const sender = new ZodMessageSender({ schema: childToWorkerMessages, sender: async (message) => { diff --git a/packages/cli-v3/src/workers/prod/worker-facade.ts b/packages/cli-v3/src/workers/prod/worker-facade.ts index 2ec2137634..77271b8016 100644 --- a/packages/cli-v3/src/workers/prod/worker-facade.ts +++ b/packages/cli-v3/src/workers/prod/worker-facade.ts @@ -31,9 +31,8 @@ declare const handleError: HandleErrorFunction | undefined; declare const __PROJECT_CONFIG__: Config; declare const tracingSDK: TracingSDK; - -const otelTracer = tracingSDK.getTracer("trigger-prod-worker", packageJson.version); -const otelLogger = tracingSDK.getLogger("trigger-prod-worker", packageJson.version); +declare const otelTracer: Tracer; +declare const otelLogger: Logger; import { TaskRunErrorCodes, @@ -43,7 +42,8 @@ import { runtime, } from "@trigger.dev/core/v3"; import { ProdRuntimeManager } from "@trigger.dev/core/v3/prod"; -import * as packageJson from "../../../package.json"; +import type { Tracer } from "@opentelemetry/api"; +import type { Logger } from "@opentelemetry/api-logs"; const durableClock = new DurableClock(); clock.setGlobalClock(durableClock); diff --git a/packages/cli-v3/src/workers/prod/worker-setup.ts b/packages/cli-v3/src/workers/prod/worker-setup.ts index 4bad77fd50..0546322e42 100644 --- a/packages/cli-v3/src/workers/prod/worker-setup.ts +++ b/packages/cli-v3/src/workers/prod/worker-setup.ts @@ -1,10 +1,12 @@ -import { Resource } from "@opentelemetry/resources"; -import { ProjectConfig, SemanticInternalAttributes, taskCatalog } from "@trigger.dev/core/v3"; +import type { Tracer } from "@opentelemetry/api"; +import * as packageJson from "../../../package.json"; +import { ProjectConfig, taskCatalog } from "@trigger.dev/core/v3"; import { 
TracingDiagnosticLogLevel, TracingSDK, StandardTaskCatalog, } from "@trigger.dev/core/v3/workers"; +import type { Logger } from "@opentelemetry/api-logs"; __SETUP_IMPORTED_PROJECT_CONFIG__; declare const __SETUP_IMPORTED_PROJECT_CONFIG__: unknown; @@ -12,11 +14,11 @@ declare const setupImportedConfig: ProjectConfig | undefined; export const tracingSDK = new TracingSDK({ url: process.env.OTEL_EXPORTER_OTLP_ENDPOINT ?? "http://0.0.0.0:4318", - resource: new Resource({ - [SemanticInternalAttributes.CLI_VERSION]: "3.0.0", - }), instrumentations: setupImportedConfig?.instrumentations ?? [], diagLogLevel: (process.env.OTEL_LOG_LEVEL as TracingDiagnosticLogLevel) ?? "none", }); +export const otelTracer: Tracer = tracingSDK.getTracer("trigger-prod-worker", packageJson.version); +export const otelLogger: Logger = tracingSDK.getLogger("trigger-prod-worker", packageJson.version); + taskCatalog.setGlobalTaskCatalog(new StandardTaskCatalog()); diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index f732221521..993f239759 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,43 @@ # create-trigger +## 3.0.0-beta.33 + +### Patch Changes + +- Updated dependencies [6a379e4e9] + - @trigger.dev/core@3.0.0-beta.33 + - @trigger.dev/yalt@3.0.0-beta.33 + +## 3.0.0-beta.32 + +### Patch Changes + +- @trigger.dev/core@3.0.0-beta.32 +- @trigger.dev/yalt@3.0.0-beta.32 + +## 3.0.0-beta.31 + +### Patch Changes + +- @trigger.dev/core@3.0.0-beta.31 +- @trigger.dev/yalt@3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- Updated dependencies [1477a2e30] +- Updated dependencies [0e919f56f] + - @trigger.dev/core@3.0.0-beta.30 + - @trigger.dev/yalt@3.0.0-beta.30 + +## 3.0.0-beta.29 + +### Patch Changes + +- @trigger.dev/core@3.0.0-beta.29 +- @trigger.dev/yalt@3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index d75a58408e..b15e57dfb2 100644 --- a/packages/cli/package.json +++ 
b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/cli", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "The Trigger.dev CLI", "main": "./dist/index.js", "types": "./dist/index.d.ts", diff --git a/packages/core-apps/CHANGELOG.md b/packages/core-apps/CHANGELOG.md index 67724bb44a..ea36d0845a 100644 --- a/packages/core-apps/CHANGELOG.md +++ b/packages/core-apps/CHANGELOG.md @@ -1,5 +1,15 @@ # @trigger.dev/core-apps +## 3.0.0-beta.33 + +## 3.0.0-beta.32 + +## 3.0.0-beta.31 + +## 3.0.0-beta.30 + +## 3.0.0-beta.29 + ## 3.0.0-beta.28 ## 3.0.0-beta.27 diff --git a/packages/core-apps/package.json b/packages/core-apps/package.json index 4be4b1f940..a8a70b7be8 100644 --- a/packages/core-apps/package.json +++ b/packages/core-apps/package.json @@ -1,7 +1,7 @@ { "name": "@trigger.dev/core-apps", "description": "Backend core code used across apps", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "private": true, "license": "MIT", "main": "./dist/index.js", diff --git a/packages/core-backend/CHANGELOG.md b/packages/core-backend/CHANGELOG.md index fe178deec7..0408e02664 100644 --- a/packages/core-backend/CHANGELOG.md +++ b/packages/core-backend/CHANGELOG.md @@ -1,5 +1,15 @@ # @trigger.dev/core-backend +## 3.0.0-beta.33 + +## 3.0.0-beta.32 + +## 3.0.0-beta.31 + +## 3.0.0-beta.30 + +## 3.0.0-beta.29 + ## 3.0.0-beta.28 ## 3.0.0-beta.27 diff --git a/packages/core-backend/package.json b/packages/core-backend/package.json index 70f2b47850..bb11352026 100644 --- a/packages/core-backend/package.json +++ b/packages/core-backend/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/core-backend", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Core code used across `@trigger.dev/sdk` and Trigger.dev server", "license": "MIT", "main": "./dist/index.js", diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 0cdd7ebd0b..0b87d195a7 100644 --- a/packages/core/CHANGELOG.md +++ 
b/packages/core/CHANGELOG.md @@ -1,5 +1,24 @@ # internal-platform +## 3.0.0-beta.33 + +### Patch Changes + +- 6a379e4e9: Fix 3rd party otel propagation from breaking our Task Events data from being properly correlated to the correct trace + +## 3.0.0-beta.32 + +## 3.0.0-beta.31 + +## 3.0.0-beta.30 + +### Patch Changes + +- 1477a2e30: Increased the timeout when canceling a checkpoint to 31s (to match the timeout on the server) +- 0e919f56f: Better handle uncaught exceptions + +## 3.0.0-beta.29 + ## 3.0.0-beta.28 ### Patch Changes diff --git a/packages/core/package.json b/packages/core/package.json index 5186c211d5..6143edc5a9 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/core", - "version": "3.0.0-beta.28", + "version": "3.0.0-beta.33", "description": "Core code used across the Trigger.dev SDK and platform", "license": "MIT", "main": "./dist/index.js", diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index 4147800282..5af8061f91 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -1,5 +1,6 @@ import { context, propagation } from "@opentelemetry/api"; -import { ZodFetchOptions, zodfetch } from "../zodfetch"; +import { version } from "../../../package.json"; +import { APIError } from "../apiErrors"; import { BatchTaskRunExecutionResult, BatchTriggerTaskRequestBody, @@ -19,9 +20,7 @@ import { UpdateScheduleOptions, } from "../schemas"; import { taskContext } from "../task-context-api"; -import { getEnvVar } from "../utils/getEnv"; -import { SafeAsyncLocalStorage } from "../utils/safeAsyncLocalStorage"; -import { APIError } from "../apiErrors"; +import { ZodFetchOptions, zodfetch } from "../zodfetch"; export type TriggerOptions = { spanParentAsLink?: boolean; @@ -239,10 +238,12 @@ export class ApiClient { const headers: Record = { "Content-Type": "application/json", Authorization: `Bearer ${this.accessToken}`, 
+ "trigger-version": version, }; // Only inject the context if we are inside a task if (taskContext.isInsideTask) { + headers["x-trigger-worker"] = "true"; propagation.inject(context.active(), headers); if (spanParentAsLink) { diff --git a/packages/core/src/v3/errors.ts b/packages/core/src/v3/errors.ts index c9a1e15515..61ee0575b2 100644 --- a/packages/core/src/v3/errors.ts +++ b/packages/core/src/v3/errors.ts @@ -54,6 +54,33 @@ export function createErrorTaskError(error: TaskRunError): any { } } +export function createJsonErrorObject(error: TaskRunError) { + switch (error.type) { + case "BUILT_IN_ERROR": { + return { + name: error.name, + message: error.message, + stackTrace: error.stackTrace, + }; + } + case "STRING_ERROR": { + return { + message: error.raw, + }; + } + case "CUSTOM_ERROR": { + return { + message: error.raw, + }; + } + case "INTERNAL_ERROR": { + return { + message: `trigger.dev internal error (${error.code})`, + }; + } + } +} + export function correctErrorStackTrace( stackTrace: string, projectDir?: string, diff --git a/packages/core/src/v3/otel/tracingSDK.ts b/packages/core/src/v3/otel/tracingSDK.ts index 50d344da50..ce2b20f10e 100644 --- a/packages/core/src/v3/otel/tracingSDK.ts +++ b/packages/core/src/v3/otel/tracingSDK.ts @@ -40,6 +40,7 @@ import { import { SemanticInternalAttributes } from "../semanticInternalAttributes"; import { TaskContextLogProcessor, TaskContextSpanProcessor } from "../taskContext/otelProcessors"; import { getEnvVar } from "../utils/getEnv"; +import { version } from "../../../package.json"; class AsyncResourceDetector implements DetectorSync { private _promise: Promise; @@ -111,6 +112,7 @@ export class TracingSDK { new Resource({ [SemanticResourceAttributes.CLOUD_PROVIDER]: "trigger.dev", [SemanticInternalAttributes.TRIGGER]: true, + [SemanticInternalAttributes.CLI_VERSION]: version, }) ) .merge(config.resource ?? 
new Resource({})) diff --git a/packages/core/src/v3/runtime/prodRuntimeManager.ts b/packages/core/src/v3/runtime/prodRuntimeManager.ts index 02a3c8c787..622a44ed7d 100644 --- a/packages/core/src/v3/runtime/prodRuntimeManager.ts +++ b/packages/core/src/v3/runtime/prodRuntimeManager.ts @@ -75,10 +75,14 @@ export class ProdRuntimeManager implements RuntimeManager { clock.reset(); // The coordinator should cancel any in-progress checkpoints - const { checkpointCanceled, version } = await this.ipc.sendWithAck("CANCEL_CHECKPOINT", { - version: "v2", - reason: "WAIT_FOR_DURATION", - }); + const { checkpointCanceled, version } = await this.ipc.sendWithAck( + "CANCEL_CHECKPOINT", + { + version: "v2", + reason: "WAIT_FOR_DURATION", + }, + 31_000 + ); if (checkpointCanceled) { // There won't be a checkpoint or external resume and we've already completed our internal timeout diff --git a/packages/core/src/v3/schemas/api.ts b/packages/core/src/v3/schemas/api.ts index 9861d921e2..19fe81fd52 100644 --- a/packages/core/src/v3/schemas/api.ts +++ b/packages/core/src/v3/schemas/api.ts @@ -115,6 +115,7 @@ export type GetEnvironmentVariablesResponseBody = z.infer< export const StartDeploymentIndexingRequestBody = z.object({ imageReference: z.string(), + selfHosted: z.boolean().optional(), }); export type StartDeploymentIndexingRequestBody = z.infer; diff --git a/packages/database/prisma/migrations/20240425122814_add_alert_schema/migration.sql b/packages/database/prisma/migrations/20240425122814_add_alert_schema/migration.sql new file mode 100644 index 0000000000..06ad92b355 --- /dev/null +++ b/packages/database/prisma/migrations/20240425122814_add_alert_schema/migration.sql @@ -0,0 +1,64 @@ +-- CreateEnum +CREATE TYPE "ProjectAlertChannelType" AS ENUM ('EMAIL', 'SLACK', 'WEBHOOK'); + +-- CreateEnum +CREATE TYPE "ProjectAlertType" AS ENUM ('TASK_RUN_ATTEMPT', 'DEPLOYMENT_FAILURE'); + +-- CreateEnum +CREATE TYPE "ProjectAlertStatus" AS ENUM ('PENDING', 'SENT', 'FAILED'); + +-- 
CreateTable +CREATE TABLE "ProjectAlertChannel" ( + "id" TEXT NOT NULL, + "friendlyId" TEXT NOT NULL, + "type" "ProjectAlertChannelType" NOT NULL, + "name" TEXT NOT NULL, + "properties" JSONB NOT NULL, + "alertTypes" "ProjectAlertType"[], + "projectId" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "ProjectAlertChannel_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "ProjectAlert" ( + "id" TEXT NOT NULL, + "friendlyId" TEXT NOT NULL, + "projectId" TEXT NOT NULL, + "environmentId" TEXT NOT NULL, + "channelId" TEXT NOT NULL, + "status" "ProjectAlertStatus" NOT NULL DEFAULT 'PENDING', + "type" "ProjectAlertType" NOT NULL, + "taskRunAttemptId" TEXT, + "workerDeploymentId" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "ProjectAlert_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "ProjectAlertChannel_friendlyId_key" ON "ProjectAlertChannel"("friendlyId"); + +-- CreateIndex +CREATE UNIQUE INDEX "ProjectAlert_friendlyId_key" ON "ProjectAlert"("friendlyId"); + +-- AddForeignKey +ALTER TABLE "ProjectAlertChannel" ADD CONSTRAINT "ProjectAlertChannel_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlert" ADD CONSTRAINT "ProjectAlert_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlert" ADD CONSTRAINT "ProjectAlert_environmentId_fkey" FOREIGN KEY ("environmentId") REFERENCES "RuntimeEnvironment"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlert" ADD CONSTRAINT "ProjectAlert_channelId_fkey" FOREIGN KEY ("channelId") REFERENCES "ProjectAlertChannel"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlert" ADD CONSTRAINT 
"ProjectAlert_taskRunAttemptId_fkey" FOREIGN KEY ("taskRunAttemptId") REFERENCES "TaskRunAttempt"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlert" ADD CONSTRAINT "ProjectAlert_workerDeploymentId_fkey" FOREIGN KEY ("workerDeploymentId") REFERENCES "WorkerDeployment"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/migrations/20240425131147_add_enabled_flag_to_alert_channels/migration.sql b/packages/database/prisma/migrations/20240425131147_add_enabled_flag_to_alert_channels/migration.sql new file mode 100644 index 0000000000..1b43b31bf6 --- /dev/null +++ b/packages/database/prisma/migrations/20240425131147_add_enabled_flag_to_alert_channels/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "ProjectAlertChannel" ADD COLUMN "enabled" BOOLEAN NOT NULL DEFAULT true; diff --git a/packages/database/prisma/migrations/20240426095144_add_deployment_success_alert_type/migration.sql b/packages/database/prisma/migrations/20240426095144_add_deployment_success_alert_type/migration.sql new file mode 100644 index 0000000000..80ee3726b8 --- /dev/null +++ b/packages/database/prisma/migrations/20240426095144_add_deployment_success_alert_type/migration.sql @@ -0,0 +1,2 @@ +-- AlterEnum +ALTER TYPE "ProjectAlertType" ADD VALUE 'DEPLOYMENT_SUCCESS'; diff --git a/packages/database/prisma/migrations/20240426095622_add_deduplication_key_to_alert_channels/migration.sql b/packages/database/prisma/migrations/20240426095622_add_deduplication_key_to_alert_channels/migration.sql new file mode 100644 index 0000000000..f5254c4a07 --- /dev/null +++ b/packages/database/prisma/migrations/20240426095622_add_deduplication_key_to_alert_channels/migration.sql @@ -0,0 +1,9 @@ +/* + Warnings: + + - The required column `deduplicationKey` was added to the `ProjectAlertChannel` table with a prisma-level default value. This is not possible if the table is not empty. 
Please add this column as optional, then populate it before making it required. + +*/ +-- AlterTable +ALTER TABLE "ProjectAlertChannel" ADD COLUMN "deduplicationKey" TEXT NOT NULL, +ADD COLUMN "userProvidedDeduplicationKey" BOOLEAN NOT NULL DEFAULT false; diff --git a/packages/database/prisma/migrations/20240426102405_add_unique_deduplication_index_to_alert_channels/migration.sql b/packages/database/prisma/migrations/20240426102405_add_unique_deduplication_index_to_alert_channels/migration.sql new file mode 100644 index 0000000000..47bb2fe979 --- /dev/null +++ b/packages/database/prisma/migrations/20240426102405_add_unique_deduplication_index_to_alert_channels/migration.sql @@ -0,0 +1,8 @@ +/* + Warnings: + + - A unique constraint covering the columns `[projectId,deduplicationKey]` on the table `ProjectAlertChannel` will be added. If there are existing duplicate values, this will fail. + +*/ +-- CreateIndex +CREATE UNIQUE INDEX "ProjectAlertChannel_projectId_deduplicationKey_key" ON "ProjectAlertChannel"("projectId", "deduplicationKey"); diff --git a/packages/database/prisma/migrations/20240428142050_add_models_for_slack_integration/migration.sql b/packages/database/prisma/migrations/20240428142050_add_models_for_slack_integration/migration.sql new file mode 100644 index 0000000000..a4977bc849 --- /dev/null +++ b/packages/database/prisma/migrations/20240428142050_add_models_for_slack_integration/migration.sql @@ -0,0 +1,31 @@ +-- CreateEnum +CREATE TYPE "IntegrationService" AS ENUM ('SLACK'); + +-- AlterTable +ALTER TABLE "ProjectAlertChannel" ADD COLUMN "integrationId" TEXT; + +-- CreateTable +CREATE TABLE "OrganizationIntegration" ( + "id" TEXT NOT NULL, + "friendlyId" TEXT NOT NULL, + "service" "IntegrationService" NOT NULL, + "integrationData" JSONB, + "tokenReferenceId" TEXT, + "organizationId" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "OrganizationIntegration_pkey" PRIMARY 
KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "OrganizationIntegration_friendlyId_key" ON "OrganizationIntegration"("friendlyId"); + +-- AddForeignKey +ALTER TABLE "ProjectAlertChannel" ADD CONSTRAINT "ProjectAlertChannel_integrationId_fkey" FOREIGN KEY ("integrationId") REFERENCES "OrganizationIntegration"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OrganizationIntegration" ADD CONSTRAINT "OrganizationIntegration_tokenReferenceId_fkey" FOREIGN KEY ("tokenReferenceId") REFERENCES "SecretReference"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OrganizationIntegration" ADD CONSTRAINT "OrganizationIntegration_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/migrations/20240428150144_org_integration_non_optional_fields/migration.sql b/packages/database/prisma/migrations/20240428150144_org_integration_non_optional_fields/migration.sql new file mode 100644 index 0000000000..56d2a83cba --- /dev/null +++ b/packages/database/prisma/migrations/20240428150144_org_integration_non_optional_fields/migration.sql @@ -0,0 +1,16 @@ +/* + Warnings: + + - Made the column `integrationData` on table `OrganizationIntegration` required. This step will fail if there are existing NULL values in that column. + - Made the column `tokenReferenceId` on table `OrganizationIntegration` required. This step will fail if there are existing NULL values in that column. 
+ +*/ +-- DropForeignKey +ALTER TABLE "OrganizationIntegration" DROP CONSTRAINT "OrganizationIntegration_tokenReferenceId_fkey"; + +-- AlterTable +ALTER TABLE "OrganizationIntegration" ALTER COLUMN "integrationData" SET NOT NULL, +ALTER COLUMN "tokenReferenceId" SET NOT NULL; + +-- AddForeignKey +ALTER TABLE "OrganizationIntegration" ADD CONSTRAINT "OrganizationIntegration_tokenReferenceId_fkey" FOREIGN KEY ("tokenReferenceId") REFERENCES "SecretReference"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/migrations/20240507113449_add_alert_storage/migration.sql b/packages/database/prisma/migrations/20240507113449_add_alert_storage/migration.sql new file mode 100644 index 0000000000..476f866ca0 --- /dev/null +++ b/packages/database/prisma/migrations/20240507113449_add_alert_storage/migration.sql @@ -0,0 +1,19 @@ +-- CreateTable +CREATE TABLE "ProjectAlertStorage" ( + "id" TEXT NOT NULL, + "projectId" TEXT NOT NULL, + "alertChannelId" TEXT NOT NULL, + "alertType" "ProjectAlertType" NOT NULL, + "storageId" TEXT NOT NULL, + "storageData" JSONB NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "ProjectAlertStorage_pkey" PRIMARY KEY ("id") +); + +-- AddForeignKey +ALTER TABLE "ProjectAlertStorage" ADD CONSTRAINT "ProjectAlertStorage_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "ProjectAlertStorage" ADD CONSTRAINT "ProjectAlertStorage_alertChannelId_fkey" FOREIGN KEY ("alertChannelId") REFERENCES "ProjectAlertChannel"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/migrations/20240517105021_add_environment_types_to_alert_channel/migration.sql b/packages/database/prisma/migrations/20240517105021_add_environment_types_to_alert_channel/migration.sql new file mode 100644 index 0000000000..a1b29617ba --- /dev/null +++ 
b/packages/database/prisma/migrations/20240517105021_add_environment_types_to_alert_channel/migration.sql @@ -0,0 +1,5 @@ +-- AlterEnum +ALTER TYPE "ProjectAlertType" ADD VALUE 'TEST'; + +-- AlterTable +ALTER TABLE "ProjectAlertChannel" ADD COLUMN "environmentTypes" "RuntimeEnvironmentType"[] DEFAULT ARRAY['STAGING', 'PRODUCTION']::"RuntimeEnvironmentType"[]; diff --git a/packages/database/prisma/migrations/20240517105224_remove_test_alert_type/migration.sql b/packages/database/prisma/migrations/20240517105224_remove_test_alert_type/migration.sql new file mode 100644 index 0000000000..3905e50a96 --- /dev/null +++ b/packages/database/prisma/migrations/20240517105224_remove_test_alert_type/migration.sql @@ -0,0 +1,16 @@ +/* + Warnings: + + - The values [TEST] on the enum `ProjectAlertType` will be removed. If these variants are still used in the database, this will fail. + +*/ +-- AlterEnum +BEGIN; +CREATE TYPE "ProjectAlertType_new" AS ENUM ('TASK_RUN_ATTEMPT', 'DEPLOYMENT_FAILURE', 'DEPLOYMENT_SUCCESS'); +ALTER TABLE "ProjectAlertChannel" ALTER COLUMN "alertTypes" TYPE "ProjectAlertType_new"[] USING ("alertTypes"::text::"ProjectAlertType_new"[]); +ALTER TABLE "ProjectAlert" ALTER COLUMN "type" TYPE "ProjectAlertType_new" USING ("type"::text::"ProjectAlertType_new"); +ALTER TABLE "ProjectAlertStorage" ALTER COLUMN "alertType" TYPE "ProjectAlertType_new" USING ("alertType"::text::"ProjectAlertType_new"); +ALTER TYPE "ProjectAlertType" RENAME TO "ProjectAlertType_old"; +ALTER TYPE "ProjectAlertType_new" RENAME TO "ProjectAlertType"; +DROP TYPE "ProjectAlertType_old"; +COMMIT; diff --git a/packages/database/prisma/migrations/20240517135206_created_bulk_action_group_and_bulk_action_item_for_canceling_and_replaying_in_bulk/migration.sql b/packages/database/prisma/migrations/20240517135206_created_bulk_action_group_and_bulk_action_item_for_canceling_and_replaying_in_bulk/migration.sql new file mode 100644 index 0000000000..1cfaff8d9b --- /dev/null +++ 
b/packages/database/prisma/migrations/20240517135206_created_bulk_action_group_and_bulk_action_item_for_canceling_and_replaying_in_bulk/migration.sql @@ -0,0 +1,54 @@ +-- CreateEnum +CREATE TYPE "BulkActionType" AS ENUM ('CANCEL', 'REPLAY'); + +-- CreateEnum +CREATE TYPE "BulkActionStatus" AS ENUM ('PENDING', 'COMPLETED'); + +-- CreateEnum +CREATE TYPE "BulkActionItemStatus" AS ENUM ('PENDING', 'COMPLETED'); + +-- CreateTable +CREATE TABLE "BulkActionGroup" ( + "id" TEXT NOT NULL, + "friendlyId" TEXT NOT NULL, + "projectId" TEXT NOT NULL, + "type" "BulkActionType" NOT NULL, + "status" "BulkActionStatus" NOT NULL DEFAULT 'PENDING', + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "BulkActionGroup_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "BulkActionItem" ( + "id" TEXT NOT NULL, + "friendlyId" TEXT NOT NULL, + "groupId" TEXT NOT NULL, + "type" "BulkActionType" NOT NULL, + "status" "BulkActionItemStatus" NOT NULL DEFAULT 'PENDING', + "sourceRunId" TEXT, + "destinationRunId" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "BulkActionItem_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "BulkActionGroup_friendlyId_key" ON "BulkActionGroup"("friendlyId"); + +-- CreateIndex +CREATE UNIQUE INDEX "BulkActionItem_friendlyId_key" ON "BulkActionItem"("friendlyId"); + +-- AddForeignKey +ALTER TABLE "BulkActionGroup" ADD CONSTRAINT "BulkActionGroup_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "BulkActionItem" ADD CONSTRAINT "BulkActionItem_groupId_fkey" FOREIGN KEY ("groupId") REFERENCES "BulkActionGroup"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "BulkActionItem" ADD CONSTRAINT "BulkActionItem_sourceRunId_fkey" FOREIGN KEY ("sourceRunId") REFERENCES "TaskRun"("id") ON DELETE 
CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "BulkActionItem" ADD CONSTRAINT "BulkActionItem_destinationRunId_fkey" FOREIGN KEY ("destinationRunId") REFERENCES "TaskRun"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/migrations/20240517164246_bulk_action_item_source_run_id_is_required/migration.sql b/packages/database/prisma/migrations/20240517164246_bulk_action_item_source_run_id_is_required/migration.sql new file mode 100644 index 0000000000..bf93ab4a33 --- /dev/null +++ b/packages/database/prisma/migrations/20240517164246_bulk_action_item_source_run_id_is_required/migration.sql @@ -0,0 +1,8 @@ +/* + Warnings: + + - Made the column `sourceRunId` on table `BulkActionItem` required. This step will fail if there are existing NULL values in that column. + +*/ +-- AlterTable +ALTER TABLE "BulkActionItem" ALTER COLUMN "sourceRunId" SET NOT NULL; diff --git a/packages/database/prisma/migrations/20240517164924_bulk_action_item_added_failed_state_with_error/migration.sql b/packages/database/prisma/migrations/20240517164924_bulk_action_item_added_failed_state_with_error/migration.sql new file mode 100644 index 0000000000..9d5be8b975 --- /dev/null +++ b/packages/database/prisma/migrations/20240517164924_bulk_action_item_added_failed_state_with_error/migration.sql @@ -0,0 +1,5 @@ +-- AlterEnum +ALTER TYPE "BulkActionItemStatus" ADD VALUE 'FAILED'; + +-- AlterTable +ALTER TABLE "BulkActionItem" ADD COLUMN "error" TEXT; diff --git a/packages/database/prisma/migrations/20240520112812_create_deferred_scheduled_event_model/migration.sql b/packages/database/prisma/migrations/20240520112812_create_deferred_scheduled_event_model/migration.sql new file mode 100644 index 0000000000..b6f418208a --- /dev/null +++ b/packages/database/prisma/migrations/20240520112812_create_deferred_scheduled_event_model/migration.sql @@ -0,0 +1,17 @@ +-- CreateTable +CREATE TABLE "DeferredScheduledEventService" ( + "id" TEXT NOT NULL, + "scheduleSourceId" 
TEXT NOT NULL, + "runAt" TIMESTAMP(3) NOT NULL, + "lastTimestamp" TIMESTAMP(3), + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "DeferredScheduledEventService_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "DeferredScheduledEventService_scheduleSourceId_key" ON "DeferredScheduledEventService"("scheduleSourceId"); + +-- AddForeignKey +ALTER TABLE "DeferredScheduledEventService" ADD CONSTRAINT "DeferredScheduledEventService_scheduleSourceId_fkey" FOREIGN KEY ("scheduleSourceId") REFERENCES "ScheduleSource"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/packages/database/prisma/schema.prisma b/packages/database/prisma/schema.prisma index d36c3061ec..75551d0cb0 100644 --- a/packages/database/prisma/schema.prisma +++ b/packages/database/prisma/schema.prisma @@ -130,12 +130,13 @@ model Organization { events EventRecord[] jobRuns JobRun[] - projects Project[] - members OrgMember[] - invites OrgMemberInvite[] - externalAccounts ExternalAccount[] - integrations Integration[] - sources TriggerSource[] + projects Project[] + members OrgMember[] + invites OrgMemberInvite[] + externalAccounts ExternalAccount[] + integrations Integration[] + sources TriggerSource[] + organizationIntegrations OrganizationIntegration[] } model ExternalAccount { @@ -401,6 +402,7 @@ model RuntimeEnvironment { taskRunAttempts TaskRunAttempt[] CheckpointRestoreEvent CheckpointRestoreEvent[] taskScheduleInstances TaskScheduleInstance[] + alerts ProjectAlert[] sessions RuntimeEnvironmentSession[] currentSession RuntimeEnvironmentSession? 
@relation("currentSession", fields: [currentSessionId], references: [id], onDelete: SetNull, onUpdate: Cascade) @@ -452,6 +454,10 @@ model Project { WorkerDeployment WorkerDeployment[] CheckpointRestoreEvent CheckpointRestoreEvent[] taskSchedules TaskSchedule[] + alertChannels ProjectAlertChannel[] + alerts ProjectAlert[] + alertStorages ProjectAlertStorage[] + bulkActionGroups BulkActionGroup[] } enum ProjectVersion { @@ -1129,8 +1135,9 @@ model SecretReference { httpEndpoints TriggerHttpEndpoint[] environmentVariableValues EnvironmentVariableValue[] - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + OrganizationIntegration OrganizationIntegration[] } enum SecretStoreProvider { @@ -1383,8 +1390,9 @@ model ScheduleSource { dynamicTrigger DynamicTrigger? @relation(fields: [dynamicTriggerId], references: [id], onDelete: Cascade, onUpdate: Cascade) dynamicTriggerId String? - externalAccount ExternalAccount? @relation(fields: [externalAccountId], references: [id], onDelete: Cascade, onUpdate: Cascade) + externalAccount ExternalAccount? @relation(fields: [externalAccountId], references: [id], onDelete: Cascade, onUpdate: Cascade) externalAccountId String? + deferredEvent DeferredScheduledEventService? @@unique([key, environmentId]) } @@ -1509,6 +1517,19 @@ model DataMigration { completedAt DateTime? } +model DeferredScheduledEventService { + id String @id @default(cuid()) + + scheduleSource ScheduleSource @relation(fields: [scheduleSourceId], references: [id], onDelete: Cascade, onUpdate: Cascade) + scheduleSourceId String @unique + + runAt DateTime + lastTimestamp DateTime? + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + // ==================================================== // v3 Models // ==================================================== @@ -1635,6 +1656,9 @@ model TaskRun { schedule TaskSchedule? 
@relation(fields: [scheduleId], references: [id], onDelete: SetNull) scheduleId String? + sourceBulkActionItems BulkActionItem[] @relation("SourceActionItemRun") + destinationBulkActionItems BulkActionItem[] @relation("DestinationActionItemRun") + @@unique([runtimeEnvironmentId, idempotencyKey]) // Task activity graph @@index([projectId, createdAt, taskIdentifier]) @@ -1764,6 +1788,7 @@ model TaskRunAttempt { checkpoints Checkpoint[] batchTaskRunItems BatchTaskRunItem[] CheckpointRestoreEvent CheckpointRestoreEvent[] + alerts ProjectAlert[] @@unique([taskRunId, number]) } @@ -2105,6 +2130,7 @@ model WorkerDeployment { updatedAt DateTime @updatedAt promotions WorkerDeploymentPromotion[] + alerts ProjectAlert[] @@unique([projectId, shortCode]) @@unique([environmentId, version]) @@ -2218,3 +2244,187 @@ model RuntimeEnvironmentSession { currentEnvironments RuntimeEnvironment[] @relation("currentSession") } + +model ProjectAlertChannel { + id String @id @default(cuid()) + + friendlyId String @unique + + ///can be provided and we won't create another with the same key + deduplicationKey String @default(cuid()) + userProvidedDeduplicationKey Boolean @default(false) + + integration OrganizationIntegration? @relation(fields: [integrationId], references: [id], onDelete: SetNull, onUpdate: Cascade) + integrationId String? 
+ + enabled Boolean @default(true) + + type ProjectAlertChannelType + name String + properties Json + alertTypes ProjectAlertType[] + environmentTypes RuntimeEnvironmentType[] @default([STAGING, PRODUCTION]) + + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) + projectId String + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + alerts ProjectAlert[] + alertStorages ProjectAlertStorage[] + + @@unique([projectId, deduplicationKey]) +} + +enum ProjectAlertChannelType { + EMAIL + SLACK + WEBHOOK +} + +model ProjectAlert { + id String @id @default(cuid()) + friendlyId String @unique + + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) + projectId String + + environment RuntimeEnvironment @relation(fields: [environmentId], references: [id], onDelete: Cascade, onUpdate: Cascade) + environmentId String + + channel ProjectAlertChannel @relation(fields: [channelId], references: [id], onDelete: Cascade, onUpdate: Cascade) + channelId String + + status ProjectAlertStatus @default(PENDING) + + type ProjectAlertType + + taskRunAttempt TaskRunAttempt? @relation(fields: [taskRunAttemptId], references: [id], onDelete: Cascade, onUpdate: Cascade) + taskRunAttemptId String? + + workerDeployment WorkerDeployment? @relation(fields: [workerDeploymentId], references: [id], onDelete: Cascade, onUpdate: Cascade) + workerDeploymentId String? 
+ + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + +enum ProjectAlertType { + TASK_RUN_ATTEMPT + DEPLOYMENT_FAILURE + DEPLOYMENT_SUCCESS +} + +enum ProjectAlertStatus { + PENDING + SENT + FAILED +} + +model ProjectAlertStorage { + id String @id @default(cuid()) + + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) + projectId String + + alertChannel ProjectAlertChannel @relation(fields: [alertChannelId], references: [id], onDelete: Cascade, onUpdate: Cascade) + alertChannelId String + + alertType ProjectAlertType + + storageId String + storageData Json + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + +model OrganizationIntegration { + id String @id @default(cuid()) + + friendlyId String @unique + + service IntegrationService + + integrationData Json + + tokenReference SecretReference @relation(fields: [tokenReferenceId], references: [id], onDelete: Cascade, onUpdate: Cascade) + tokenReferenceId String + + organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade, onUpdate: Cascade) + organizationId String + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + alertChannels ProjectAlertChannel[] +} + +enum IntegrationService { + SLACK +} + +/// Bulk actions, like canceling and replaying runs +model BulkActionGroup { + id String @id @default(cuid()) + + friendlyId String @unique + + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) + projectId String + + type BulkActionType + items BulkActionItem[] + + /// When the group is created it's pending. After we've processed all the items it's completed. This does not mean the associated runs are completed. + status BulkActionStatus @default(PENDING) + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + +enum BulkActionType { + /// Cancels existing runs. 
This populates the destination runs. + CANCEL + /// Replays existing runs. The original runs go as source runs, and the new runs go as destination runs. + REPLAY +} + +enum BulkActionStatus { + PENDING + COMPLETED +} + +model BulkActionItem { + id String @id @default(cuid()) + + friendlyId String @unique + + group BulkActionGroup @relation(fields: [groupId], references: [id], onDelete: Cascade, onUpdate: Cascade) + groupId String + + type BulkActionType + + /// When the item is created it's pending. After we've processed the item it's completed. This does not mean the associated runs are completed. + status BulkActionItemStatus @default(PENDING) + + /// The run that is the source of the action, e.g. when replaying this is the original run + sourceRun TaskRun @relation("SourceActionItemRun", fields: [sourceRunId], references: [id], onDelete: Cascade, onUpdate: Cascade) + sourceRunId String + + /// The run that's a result of the action, this will be set when the run has been created + destinationRun TaskRun? @relation("DestinationActionItemRun", fields: [destinationRunId], references: [id], onDelete: Cascade, onUpdate: Cascade) + destinationRunId String? + + error String? 
+ + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + +enum BulkActionItemStatus { + PENDING + COMPLETED + FAILED +} diff --git a/packages/emails/emails/alert-attempt-failure.tsx b/packages/emails/emails/alert-attempt-failure.tsx new file mode 100644 index 0000000000..f7cfcb6c23 --- /dev/null +++ b/packages/emails/emails/alert-attempt-failure.tsx @@ -0,0 +1,87 @@ +import { + Body, + CodeBlock, + Container, + Head, + Html, + Link, + Preview, + Text, + dracula, +} from "@react-email/components"; +import { z } from "zod"; +import { Footer } from "./components/Footer"; +import { Image } from "./components/Image"; +import { anchor, container, h1, main, paragraphLight, paragraphTight } from "./components/styles"; + +export const AlertAttemptEmailSchema = z.object({ + email: z.literal("alert-attempt"), + taskIdentifier: z.string(), + fileName: z.string(), + exportName: z.string(), + version: z.string(), + environment: z.string(), + error: z.object({ + message: z.string(), + name: z.string().optional(), + stackTrace: z.string().optional(), + }), + attemptLink: z.string().url(), +}); + +const previewDefaults = { + taskIdentifier: "my-task", + fileName: "other.ts", + exportName: "myTask", + version: "20240101.1", + environment: "prod", + error: { + message: "Error message", + name: "Error name", + stackTrace: "Error stack trace", + }, + attemptLink: "https://trigger.dev", +}; + +export default function Email(props: z.infer) { + const { taskIdentifier, fileName, exportName, version, environment, error, attemptLink } = { + ...previewDefaults, + ...props, + }; + + return ( + + + {`[${version}.${environment} ${taskIdentifier}] ${error.message}`} + + + There's been an error on `{taskIdentifier}` + Task ID: {taskIdentifier} + Filename: {fileName} + Function: {exportName}() + Version: {version} + Environment: {environment} + + {error.message} + {error.stackTrace && ( + + )} + + Investigate this error + + + Trigger.dev +