From 987092c3bb543900d1000dcdf9305b345862bef2 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Tue, 28 Oct 2025 17:46:32 +0000 Subject: [PATCH 01/13] Started adding use cases pages --- docs/docs.json | 9 ++ docs/guides/introduction.mdx | 35 ++++++ docs/guides/use-cases/data-processing-etl.mdx | 106 +++++++++++++++++ docs/guides/use-cases/marketing.mdx | 103 ++++++++++++++++ docs/guides/use-cases/media-generation.mdx | 111 ++++++++++++++++++ docs/guides/use-cases/media-processing.mdx | 107 +++++++++++++++++ docs/snippets/use-cases-cards.mdx | 46 ++++++++ 7 files changed, 517 insertions(+) create mode 100644 docs/guides/use-cases/data-processing-etl.mdx create mode 100644 docs/guides/use-cases/marketing.mdx create mode 100644 docs/guides/use-cases/media-generation.mdx create mode 100644 docs/guides/use-cases/media-processing.mdx create mode 100644 docs/snippets/use-cases-cards.mdx diff --git a/docs/docs.json b/docs/docs.json index 6ba6c67472..5339d3a556 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -333,6 +333,15 @@ } ] }, + { + "group": "Use cases", + "pages": [ + "guides/use-cases/data-processing-etl", + "guides/use-cases/media-generation", + "guides/use-cases/media-processing", + "guides/use-cases/marketing" + ] + }, { "group": "Example projects", "pages": [ diff --git a/docs/guides/introduction.mdx b/docs/guides/introduction.mdx index 420494ea66..2af52dc13b 100644 --- a/docs/guides/introduction.mdx +++ b/docs/guides/introduction.mdx @@ -15,6 +15,41 @@ mode: "center" +## Use cases + +Explore comprehensive guides for common use cases and workflows: + + + + Transform and move data reliably at any scale. Whether streaming real-time analytics, enriching + customer records, or synchronizing large datasets. + + + Create scalable content workflows from single assets to thousands. Generate PDFs, videos, or + social media content with progress tracking and approval steps. 
+ + + Process audio, video, and images efficiently with adaptive workflows. From real-time + transcription to batch optimization. + + + Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, + personalization engines, and analytics workflows. + + + ## Guides Get set up fast using our detailed walk-through guides. diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx new file mode 100644 index 0000000000..2a4d0072e4 --- /dev/null +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -0,0 +1,106 @@ +--- +title: "Data Processing & ETL" +description: "Build reliable data processing and ETL pipelines with automatic retries, progress tracking, and no timeout limits using Trigger.dev" +--- + +import UseCasesCards from "/snippets/use-cases-cards.mdx"; + +## Overview + +Data processing and ETL (Extract, Transform, Load) workflows require handling large datasets, complex transformations, and reliable data movement between systems. Build robust data pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for web scraping, database synchronization, real-time analytics, and large-scale data transformation. + +## Basic data processing and ETL workflow implementation + +A typical ETL pipeline: + +1. **Extract**: Pull from APIs, databases, S3, or web scraping +2. **Transform**: Clean, validate, enrich data +3. **Load**: Write to warehouse, database, or storage +4. **Monitor**: Track progress, handle failures + +Each step is durable and retryable—if transformation fails, Trigger.dev automatically retries without re-extracting source data thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency keys](/idempotency). 
+ +Trigger.dev is ideal for ETL pipelines because there are no [timeout limits](/runs/max-duration) (process datasets for hours or days), [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) parallelizes across thousands of records with [queue.concurrencyLimit](/queue-concurrency) to respect API rate limits, [metadata](/runs/metadata) + [realtime](/realtime) stream row-by-row progress to dashboards, and [schedules.task()](/tasks/scheduled) handles recurring jobs with cron syntax. + +## Data processing workflow examples + + + + Import CSV files with progress tracking streamed to the frontend. + + + Scrape Hacker News using BrowserBase and Puppeteer, summarize with ChatGPT. + + + Crawl URLs and return LLM-ready markdown using Firecrawl. + + + Run CRUD operations on a Supabase database table. + + + Trigger tasks from database changes using Sequin's CDC platform. + + + Automatically sync environment variables from Vercel projects. + + + +## Production use cases + + + +Read how Papermark processes thousands of documents per month using Trigger.dev. 
+ + + +## Common data processing patterns + +### Scheduled Data Syncs + +Run ETL jobs on a schedule to keep systems in sync: + +- Daily database exports and backups +- Hourly API data pulls and transformations +- Real-time webhook processing and routing +- Periodic data warehouse updates + +### Event-Driven Processing + +Respond to data events with automated workflows: + +- Process new database records as they're created +- Transform uploaded files immediately +- React to webhook events from external systems +- Handle real-time data streams + +### Batch Processing + +Process large datasets efficiently: + +- Import CSV files with thousands of rows +- Bulk update records across systems +- Process queued data in parallel batches +- Generate reports from aggregated data + +### Pipeline Orchestration + +Chain multiple processing steps together: + +- Extract from API → Transform → Load to database +- Web scraping → Data cleaning → Analysis → Storage +- File upload → Validation → Processing → Notification +- Multi-source data aggregation and enrichment + + diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx new file mode 100644 index 0000000000..06c01e3c72 --- /dev/null +++ b/docs/guides/use-cases/marketing.mdx @@ -0,0 +1,103 @@ +--- +title: "Marketing" +description: "Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, personalization engines, and analytics workflows with Trigger.dev" +--- + +import UseCasesCards from "/snippets/use-cases-cards.mdx"; + +## Overview + +Marketing automation workflows require orchestrating complex campaigns that respond to user behavior, send personalized content, and track engagement across multiple channels. Build reliable marketing pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for email sequences, customer journeys, personalization engines, and analytics workflows. 
+ +## Basic marketing workflow implementation + +A typical marketing automation workflow: + +1. **Trigger**: User event, [webhook](/triggering), or [scheduled task](/tasks/scheduled) +2. **Segment**: Query database for targeting criteria +3. **Generate**: Create personalized content (AI-generated copy, dynamic images) +4. **Send**: Deliver via email/SMS APIs with [retry logic](/errors-retrying) +5. **Track**: Store engagement data, [trigger follow-ups](/triggering#yourtask-trigger) + +Each step is durable and retryable—if email sending fails, Trigger.dev automatically retries without regenerating personalized content. + +Trigger.dev is ideal for marketing automation because [wait.for()](/wait-for) and [wait.until()](/wait-until) let you build drip campaigns with precise delays (hours to weeks) without paying for idle time, [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) sends to thousands in parallel with configurable concurrency, and [idempotency keys](/idempotency) ensure users never receive duplicate messages even when tasks retry. 
+ +## Common marketing workflow patterns + +### Drip Campaigns + +Use scheduled tasks and wait conditions to create sophisticated email sequences: + +- Welcome series for new subscribers +- Onboarding sequences for new users +- Re-engagement campaigns for inactive users +- Product education sequences + +### Behavioral Triggers + +Respond to user actions with personalized campaigns: + +- Cart abandonment recovery +- Post-purchase follow-up +- Feature usage-based messaging +- Milestone celebration campaigns + +### Personalization at Scale + +Generate personalized content for thousands of users: + +- Dynamic email content based on user preferences +- Personalized product recommendations +- Custom landing pages for different segments +- Individualized promotional offers + +### Analytics and Reporting + +Track campaign performance and optimize: + +- A/B test different message variants +- Monitor engagement metrics across channels +- Generate automated performance reports +- Trigger campaigns based on performance thresholds + +## Marketing workflow examples + + + + Send a sequence of emails over several days using Resend. + + + Send emails using React Email with JSX email templates. + + + Trigger campaigns from Stripe payment events and subscription changes. + + + Chain prompts to generate and translate copy in multiple languages. + + + Transform basic product photos into professional marketing images. + + + Add human approval steps to workflows using ReactFlow and waitpoint tokens. + + + + diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx new file mode 100644 index 0000000000..d676175152 --- /dev/null +++ b/docs/guides/use-cases/media-generation.mdx @@ -0,0 +1,111 @@ +--- +title: "AI-Powered Media Generation" +description: "Build reliable AI media generation workflows for images, videos, audio, and more with Trigger.dev. 
No timeouts, automatic retries, and real-time progress tracking" +--- + +import UseCasesCards from "/snippets/use-cases-cards.mdx"; + +## Overview + +AI-powered media generation workflows require handling unpredictable API latencies, managing rate limits, and processing computationally intensive operations. Build reliable AI content generation pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for AI image generation, video synthesis, audio creation, and multi-modal content workflows. + +## Basic AI media generation workflow implementation + +A typical AI generation pipeline: + +1. **Input**: Receive prompts, parameters, reference images +2. **Generate**: Call AI APIs (OpenAI, Replicate, Fal.ai, Stability, etc.) +3. **Post-process**: Upscale, optimize, apply transformations +4. **Review**: Human approval or automated quality checks +5. **Deliver**: Upload to storage, update database + +Each step is durable and retryable—if an AI API call times out, Trigger.dev automatically retries without re-executing expensive prompt generation thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency](/idempotency). + +Trigger.dev is ideal for AI media generation because [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) pauses during AI API calls (you only pay for active compute, not inference time), no [timeout limits](/runs/max-duration) means generation can take minutes or hours, [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) generates hundreds in parallel with [queue.concurrencyLimit](/queue-concurrency) respecting API rate limits, [metadata.stream()](/runs/metadata#stream) + [realtime](/realtime) stream previews to your frontend, and [wait.for()](/wait-for-token) adds human approval gates for brand safety. + +## AI media generation workflow examples + + + + Transform product photos into professional marketing images using Replicate. 
+ + + Generate images from text prompts using OpenAI's DALL·E 3. + + + Generate images with Fal.ai and stream progress updates in real-time. + + + Build image generation workflows using Vercel's AI SDK. + + + Generate memes with DALL·E 3 and add human approval steps. + + + Build a research agent that generates comprehensive reports with Vercel AI SDK. + + + +## Production use cases + + + +Read how Icon uses Trigger.dev to process and generate thousands of AI-powered videos per month for their AI-driven video creation platform. + + + +## Common AI generation patterns + +### Single Media Generation + +Generate individual AI-powered content on demand: + +- Generate images from text prompts +- Transform existing images with AI models +- Create variations of source images +- Apply AI filters and effects + +### Batch Generation + +Create content at scale for production workflows: + +- Generate hundreds of product images +- Create personalized marketing assets at scale +- Generate thumbnail variations for A/B testing +- Bulk process images through AI models + +### Multi-step AI Pipelines + +Chain multiple AI operations for sophisticated workflows: + +- Prompt generation → Image generation → Upscaling +- Image generation → Style transfer → Optimization +- Audio generation → Transcription → Translation +- Video generation → Thumbnail extraction → Metadata generation + +### Human-in-the-loop + +Add review and approval steps to AI workflows: + +- Generate content → Human review → Publish +- AI moderation → Flagged content review → Decision +- Batch generation → Sample review → Approve batch +- Iterative refinement with human feedback + + diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx new file mode 100644 index 0000000000..290ab4c90a --- /dev/null +++ b/docs/guides/use-cases/media-processing.mdx @@ -0,0 +1,107 @@ +--- +title: "Media Processing" +description: "Build reliable media processing workflows for video transcoding, 
image processing, audio transformation, and more with Trigger.dev" +--- + +import UseCasesCards from "/snippets/use-cases-cards.mdx"; + +## Overview + +Media processing workflows require handling large files (videos, images, audio), long-running operations, and resource-intensive transformations. Build reliable media processing pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for video transcoding, image optimization, audio processing, and AI-powered media analysis. + +## Media processing workflow implementation + +A typical media processing pipeline: + +1. **Trigger**: Upload webhook or S3 event +2. **Download**: Fetch source file (video, image, audio, document) +3. **Process**: Transcode, resize, compress, extract frames +4. **Upload**: Store results to S3/R2/storage +5. **Cleanup**: Update database, delete temp files + +Each step is durable and retryable—if FFmpeg processing fails, Trigger.dev automatically retries without re-downloading the source file thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency](/idempotency). + +Trigger.dev is ideal for media processing because there are no [timeout limits](/runs/max-duration) (transcode multi-hour videos or run CPU-intensive operations indefinitely), [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) processes hundreds of files in parallel with [queue.concurrencyLimit](/queue-concurrency) controlling resource usage, [metadata](/runs/metadata) + [realtime](/realtime) stream processing progress to your frontend, and [wait.for()](/wait-for-token) enables human approval workflows for content moderation. + +## Media processing workflow examples + + + + Process videos and upload results to R2 storage using FFmpeg. + + + Transcribe audio files using Deepgram's speech recognition API. + + + Convert documents to PDF using LibreOffice. + + + Extract form data from PDFs using Python and PyMuPDF. 
+ + + +## Production use cases + + + +Read how Icon uses Trigger.dev to process and generate thousands of videos per month for their AI-driven video creation platform. + + + + + +Read how Papermark process thousands of documents per month using Trigger.dev. + + + +## Common media processing patterns + +### Video Processing + +Transform and optimize video content at scale: + +- Transcode videos to multiple formats and resolutions +- Generate thumbnails and preview clips +- Add watermarks, captions, or overlays +- Extract audio tracks or video frames + +### Image Processing + +Optimize and transform images for any use case: + +- Resize and compress for web delivery +- Generate multiple variants (thumbnails, crops, formats) +- Apply filters, effects, or branding +- Convert between formats (PNG, JPEG, WebP) + +### Audio Processing + +Process and transform audio files: + +- Transcribe speech to text +- Convert between audio formats +- Extract audio from video files +- Apply audio effects and normalization + +### Document Processing + +Handle document transformation workflows: + +- Convert documents to PDF +- Extract text and form data +- Generate document previews +- OCR and text extraction + + diff --git a/docs/snippets/use-cases-cards.mdx b/docs/snippets/use-cases-cards.mdx new file mode 100644 index 0000000000..2ea6744461 --- /dev/null +++ b/docs/snippets/use-cases-cards.mdx @@ -0,0 +1,46 @@ +## Use cases + + + + Build intelligent workflows that combine AI processing with human oversight. From research + agents to customer support systems and content moderation pipelines. + + + Transform and move data reliably at any scale. Whether streaming real-time analytics, enriching + customer records, or synchronizing large datasets. + + + Create scalable content workflows from single assets to thousands. Generate PDFs, videos, or + social media content with progress tracking and approval steps. + + + Generate images, videos, and audio with AI models. 
Handle unpredictable API latencies, + manage rate limits, and stream progress in real-time. + + + Process audio, video, and images efficiently with adaptive workflows. From real-time + transcription to batch optimization. + + + Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, + personalization engines, and analytics workflows. + + From e38c06237c1dede173b6ab55e28a3a819bcfe8c8 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 12:21:17 +0000 Subject: [PATCH 02/13] Updates --- .../v3/services/aiRunFilterService.server.ts | 60 +++++--- docs/guides/use-cases/data-processing-etl.mdx | 140 ++++++++--------- docs/guides/use-cases/marketing.mdx | 138 ++++++++--------- docs/guides/use-cases/media-generation.mdx | 145 ++++++++---------- docs/guides/use-cases/media-processing.mdx | 136 +++++++--------- 5 files changed, 283 insertions(+), 336 deletions(-) diff --git a/apps/webapp/app/v3/services/aiRunFilterService.server.ts b/apps/webapp/app/v3/services/aiRunFilterService.server.ts index 4ce12b9455..0dcab5c04e 100644 --- a/apps/webapp/app/v3/services/aiRunFilterService.server.ts +++ b/apps/webapp/app/v3/services/aiRunFilterService.server.ts @@ -30,7 +30,7 @@ const AIFilterResponseSchema = z export interface QueryQueues { query( search: string | undefined, - type: "task" | "custom" | undefined + type: "task" | "custom" | undefined, ): Promise<{ queues: string[]; }>; @@ -39,14 +39,14 @@ export interface QueryQueues { export interface QueryVersions { query( versionPrefix: string | undefined, - isCurrent: boolean | undefined + isCurrent: boolean | undefined, ): Promise< | { - versions: string[]; - } + versions: string[]; + } | { - version: string; - } + version: string; + } >; } @@ -64,13 +64,13 @@ export interface QueryTasks { export type AIFilterResult = | { - success: true; - filters: TaskRunListSearchFilters; - } + success: true; + filters: TaskRunListSearchFilters; + } | { - success: 
false; - error: string; - }; + success: false; + error: string; + }; export class AIRunFilterService { constructor( @@ -80,7 +80,7 @@ export class AIRunFilterService { queryQueues: QueryQueues; queryTasks: QueryTasks; }, - private readonly model: LanguageModelV1 = openai("gpt-4o-mini") + private readonly model: LanguageModelV1 = openai("gpt-4o-mini"), ) {} async call(text: string, environmentId: string): Promise { @@ -92,7 +92,9 @@ export class AIRunFilterService { lookupTags: tool({ description: "Look up available tags in the environment", parameters: z.object({ - query: z.string().optional().describe("Optional search query to filter tags"), + query: z.string().optional().describe( + "Optional search query to filter tags", + ), }), execute: async ({ query }) => { return await this.queryFns.queryTags.query(query); @@ -110,22 +112,27 @@ export class AIRunFilterService { .string() .optional() .describe( - "Optional version name to filter (e.g. 20250701.1), it uses contains to compare. Don't pass `latest` or `current`, the query has to be in the reverse date format specified. Leave out to get all recent versions." + "Optional version name to filter (e.g. 20250701.1), it uses contains to compare. Don't pass `latest` or `current`, the query has to be in the reverse date format specified. Leave out to get all recent versions.", ), }), execute: async ({ versionPrefix, isCurrent }) => { - return await this.queryFns.queryVersions.query(versionPrefix, isCurrent); + return await this.queryFns.queryVersions.query( + versionPrefix, + isCurrent, + ); }, }), lookupQueues: tool({ description: "Look up available queues in the environment", parameters: z.object({ - query: z.string().optional().describe("Optional search query to filter queues"), + query: z.string().optional().describe( + "Optional search query to filter queues", + ), type: z .enum(["task", "custom"]) .optional() .describe( - "Filter by queue type, only do this if the user specifies it explicitly." 
+ "Filter by queue type, only do this if the user specifies it explicitly.", ), }), execute: async ({ query, type }) => { @@ -142,12 +149,15 @@ export class AIRunFilterService { }), }, maxSteps: 5, - system: `You are an AI assistant that converts natural language descriptions into structured filter parameters for a task run filtering system. + system: + `You are an AI assistant that converts natural language descriptions into structured filter parameters for a task run filtering system. Available filter options: - statuses: Array of run statuses (PENDING, EXECUTING, COMPLETED_SUCCESSFULLY, COMPLETED_WITH_ERRORS, CANCELED, TIMED_OUT, CRASHED, etc.) - period: Time period string (e.g., "1h", "7d", "30d", "1y") - - from/to: ISO date string. Today's date is ${new Date().toISOString()}, if they only specify a day use the current month. If they don't specify a year use the current year. If they don't specify a time of day use midnight. + - from/to: ISO date string. Today's date is ${ + new Date().toISOString() + }, if they only specify a day use the current month. If they don't specify a year use the current year. If they don't specify a time of day use midnight. - tags: Array of tag names to filter by. Use the lookupTags tool to get the tags. - tasks: Array of task identifiers to filter by. Use the lookupTasks tool to get the tasks. - machines: Array of machine presets (micro, small, small-2x, medium, large, xlarge, etc.) @@ -159,7 +169,7 @@ export class AIRunFilterService { - scheduleId: Specific schedule ID to filter by - Common patterns to recognize: + Common workflows to recognize: - "failed runs" → statuses: ["COMPLETED_WITH_ERRORS", "CRASHED", "TIMED_OUT", "SYSTEM_FAILURE"]. - "runs not dequeued yet" → statuses: ["PENDING", "PENDING_VERSION", "DELAYED"] - If they say "only failed" then only use "COMPLETED_WITH_ERRORS". 
@@ -232,7 +242,9 @@ export class AIRunFilterService { } // Validate the filters against the schema to catch any issues - const validationResult = AIFilters.safeParse(result.experimental_output.filters); + const validationResult = AIFilters.safeParse( + result.experimental_output.filters, + ); if (!validationResult.success) { logger.error("AI filter validation failed", { errors: validationResult.error.errors, @@ -252,7 +264,9 @@ export class AIRunFilterService { from: validationResult.data.from ? new Date(validationResult.data.from).getTime() : undefined, - to: validationResult.data.to ? new Date(validationResult.data.to).getTime() : undefined, + to: validationResult.data.to + ? new Date(validationResult.data.to).getTime() + : undefined, }, }; } catch (error) { diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index 2a4d0072e4..0a9c4c7609 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -1,106 +1,88 @@ --- -title: "Data Processing & ETL" -description: "Build reliable data processing and ETL pipelines with automatic retries, progress tracking, and no timeout limits using Trigger.dev" +title: "Data processing & ETL workflows" +sidebarTitle: "Data processing & ETL" +description: "Learn how to use Trigger.dev for data processing and ETL including web scraping, database synchronization, batch enrichment, and streaming analytics workflows" --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Data processing and ETL (Extract, Transform, Load) workflows require handling large datasets, complex transformations, and reliable data movement between systems. Build robust data pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for web scraping, database synchronization, real-time analytics, and large-scale data transformation. 
+Build data pipelines that process large datasets without timeouts. Handle streaming analytics, batch enrichment, web scraping, database sync, and file processing with automatic retries and progress tracking. -## Basic data processing and ETL workflow implementation +## Featured examples -A typical ETL pipeline: - -1. **Extract**: Pull from APIs, databases, S3, or web scraping -2. **Transform**: Clean, validate, enrich data -3. **Load**: Write to warehouse, database, or storage -4. **Monitor**: Track progress, handle failures - -Each step is durable and retryable—if transformation fails, Trigger.dev automatically retries without re-extracting source data thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency keys](/idempotency). - -Trigger.dev is ideal for ETL pipelines because there are no [timeout limits](/runs/max-duration) (process datasets for hours or days), [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) parallelizes across thousands of records with [queue.concurrencyLimit](/queue-concurrency) to respect API rate limits, [metadata](/runs/metadata) + [realtime](/realtime) stream row-by-row progress to dashboards, and [schedules.task()](/tasks/scheduled) handles recurring jobs with cron syntax. - -## Data processing workflow examples - - + - Import CSV files with progress tracking streamed to the frontend. + Import CSV files with progress streamed live to frontend. - Scrape Hacker News using BrowserBase and Puppeteer, summarize with ChatGPT. - - - Crawl URLs and return LLM-ready markdown using Firecrawl. + Scrape websites using BrowserBase and Puppeteer. - Run CRUD operations on a Supabase database table. - - - Trigger tasks from database changes using Sequin's CDC platform. - - - Automatically sync environment variables from Vercel projects. + Run CRUD operations on Supabase database tables. -## Production use cases - - - -Read how Papermark processes thousands of documents per month using Trigger.dev. 
- - - -## Common data processing patterns - -### Scheduled Data Syncs - -Run ETL jobs on a schedule to keep systems in sync: - -- Daily database exports and backups -- Hourly API data pulls and transformations -- Real-time webhook processing and routing -- Periodic data warehouse updates - -### Event-Driven Processing - -Respond to data events with automated workflows: - -- Process new database records as they're created -- Transform uploaded files immediately -- React to webhook events from external systems -- Handle real-time data streams - -### Batch Processing - -Process large datasets efficiently: - -- Import CSV files with thousands of rows -- Bulk update records across systems -- Process queued data in parallel batches -- Generate reports from aggregated data - -### Pipeline Orchestration - -Chain multiple processing steps together: - -- Extract from API → Transform → Load to database -- Web scraping → Data cleaning → Analysis → Storage -- File upload → Validation → Processing → Notification -- Multi-source data aggregation and enrichment +## Why Trigger.dev for data processing + +**Process datasets for hours without timeouts** + +Handle multi-hour transformations, large file processing, or complete database exports. No execution time limits. + +**Parallel processing with built-in rate limiting** + +Process thousands of records simultaneously while respecting API rate limits. Scale efficiently without overwhelming downstream services. + +**Stream progress to your users in real-time** + +Show row-by-row processing status updating live in your dashboard. Users see exactly where processing is and how long remains. 
+ +## Common workflows + +Here are some basic examples of data processing and ETL workflows: + + + + + Pull from APIs, databases, S3, or web scraping + Clean, validate, enrich data + Write to warehouse, database, or storage + Track progress, handle failures + + + + + Load target pages with headless browser + Pull content, links, structured data + Clean HTML, parse JSON, normalize data + Save to database or file storage + + + + + Fetch records needing enrichment + Call external APIs in parallel batches + Check data quality and completeness + Write enriched data back to database + + + + + Receive file via webhook or storage event + Read CSV, JSON, XML, or binary format + Transform, validate, chunk large files + Bulk insert to database or data warehouse + + + diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index 06c01e3c72..e4cc478376 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -1,103 +1,95 @@ --- -title: "Marketing" -description: "Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, personalization engines, and analytics workflows with Trigger.dev" +title: "Marketing workflows" +sidebarTitle: "Marketing workflows" +description: "Learn how to use Trigger.dev for marketing workflows, including drip campaigns, behavioral triggers, personalization engines, and AI-powered content workflows" --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Marketing automation workflows require orchestrating complex campaigns that respond to user behavior, send personalized content, and track engagement across multiple channels. Build reliable marketing pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for email sequences, customer journeys, personalization engines, and analytics workflows. +Build marketing workflows from drip campaigns to real-time personalization. 
Handle multi-day sequences, behavioral triggers, dynamic content generation, and live analytics, all with automatic retries and progress tracking. -## Basic marketing workflow implementation +## Featured examples -A typical marketing automation workflow: - -1. **Trigger**: User event, [webhook](/triggering), or [scheduled task](/tasks/scheduled) -2. **Segment**: Query database for targeting criteria -3. **Generate**: Create personalized content (AI-generated copy, dynamic images) -4. **Send**: Deliver via email/SMS APIs with [retry logic](/errors-retrying) -5. **Track**: Store engagement data, [trigger follow-ups](/triggering#yourtask-trigger) - -Each step is durable and retryable—if email sending fails, Trigger.dev automatically retries without regenerating personalized content. - -Trigger.dev is ideal for marketing automation because [wait.for()](/wait-for) and [wait.until()](/wait-until) let you build drip campaigns with precise delays (hours to weeks) without paying for idle time, [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) sends to thousands in parallel with configurable concurrency, and [idempotency keys](/idempotency) ensure users never receive duplicate messages even when tasks retry. 
- -## Common marketing workflow patterns - -### Drip Campaigns - -Use scheduled tasks and wait conditions to create sophisticated email sequences: - -- Welcome series for new subscribers -- Onboarding sequences for new users -- Re-engagement campaigns for inactive users -- Product education sequences - -### Behavioral Triggers - -Respond to user actions with personalized campaigns: - -- Cart abandonment recovery -- Post-purchase follow-up -- Feature usage-based messaging -- Milestone celebration campaigns - -### Personalization at Scale - -Generate personalized content for thousands of users: - -- Dynamic email content based on user preferences -- Personalized product recommendations -- Custom landing pages for different segments -- Individualized promotional offers - -### Analytics and Reporting - -Track campaign performance and optimize: - -- A/B test different message variants -- Monitor engagement metrics across channels -- Generate automated performance reports -- Trigger campaigns based on performance thresholds - -## Marketing workflow examples - - + - Send a sequence of emails over several days using Resend. - - - Send emails using React Email with JSX email templates. - - - Trigger campaigns from Stripe payment events and subscription changes. - - - Chain prompts to generate and translate copy in multiple languages. + Send multi-day email sequences with wait delays between messages. - Transform basic product photos into professional marketing images. + Transform product photos into professional marketing images with AI. - Add human approval steps to workflows using ReactFlow and waitpoint tokens. + Add approval gates to workflows using waitpoint tokens. +## Why Trigger.dev for marketing + +**Delays without idle costs** + +Wait hours or weeks between steps. Pay only for active compute, not wait time. Perfect for drip campaigns and scheduled follow-ups. + +**Guaranteed delivery** + +Messages send exactly once, even after retries. 
Personalized content isn't regenerated on failure. + +**Scale without limits** + +Process thousands in parallel while respecting rate limits. Send to entire segments without overwhelming APIs. + +## Common workflows + +Here are some basic examples of marketing workflows: + + + + + User signs up or performs action + Query database for targeting criteria + Pause for specified time (hours/days) + Deliver personalized message + Log engagement and trigger next step + + + + + Cart add, page view, feature usage + Pull user profile and preferences + Create personalized message + Send via email, SMS, or in-app + + + + + Generate AI content or asset + Pause for approval workflow + Apply feedback if needed + Mark ready for publishing + Deploy to channels + + + + + User completes survey + Add context from CRM + Score and categorize + Save insights + Send personalized response + + + + diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index d676175152..3955632c1c 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -1,50 +1,27 @@ --- -title: "AI-Powered Media Generation" -description: "Build reliable AI media generation workflows for images, videos, audio, and more with Trigger.dev. No timeouts, automatic retries, and real-time progress tracking" +title: "AI-powered media generation workflows" +sidebarTitle: "AI-powered media generation" +description: "Learn how to use Trigger.dev for AI media generation including image creation, video synthesis, audio generation, and multi-modal content workflows" --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -AI-powered media generation workflows require handling unpredictable API latencies, managing rate limits, and processing computationally intensive operations. 
Build reliable AI content generation pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for AI image generation, video synthesis, audio creation, and multi-modal content workflows. +Build AI media generation pipelines that handle unpredictable API latencies and long-running operations. Generate images, videos, audio, and multi-modal content with automatic retries, progress tracking, and no timeout limits. -## Basic AI media generation workflow implementation +## Featured examples -A typical AI generation pipeline: - -1. **Input**: Receive prompts, parameters, reference images -2. **Generate**: Call AI APIs (OpenAI, Replicate, Fal.ai, Stability, etc.) -3. **Post-process**: Upscale, optimize, apply transformations -4. **Review**: Human approval or automated quality checks -5. **Deliver**: Upload to storage, update database - -Each step is durable and retryable—if an AI API call times out, Trigger.dev automatically retries without re-executing expensive prompt generation thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency](/idempotency). - -Trigger.dev is ideal for AI media generation because [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) pauses during AI API calls (you only pay for active compute, not inference time), no [timeout limits](/runs/max-duration) means generation can take minutes or hours, [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) generates hundreds in parallel with [queue.concurrencyLimit](/queue-concurrency) respecting API rate limits, [metadata.stream()](/runs/metadata#stream) + [realtime](/realtime) stream previews to your frontend, and [wait.for()](/wait-for-token) adds human approval gates for brand safety. - -## AI media generation workflow examples - - - - Transform product photos into professional marketing images using Replicate. - + Generate images from text prompts using OpenAI's DALL·E 3. 
- - Generate images with Fal.ai and stream progress updates in real-time. - - Build image generation workflows using Vercel's AI SDK. + Transform product photos into professional marketing images using Replicate. Generate memes with DALL·E 3 and add human approval steps. - - Build a research agent that generates comprehensive reports with Vercel AI SDK. - -## Production use cases - - - -Read how Icon uses Trigger.dev to process and generate thousands of AI-powered videos per month for their AI-driven video creation platform. - - - -## Common AI generation patterns - -### Single Media Generation - -Generate individual AI-powered content on demand: - -- Generate images from text prompts -- Transform existing images with AI models -- Create variations of source images -- Apply AI filters and effects - -### Batch Generation - -Create content at scale for production workflows: - -- Generate hundreds of product images -- Create personalized marketing assets at scale -- Generate thumbnail variations for A/B testing -- Bulk process images through AI models - -### Multi-step AI Pipelines - -Chain multiple AI operations for sophisticated workflows: - -- Prompt generation → Image generation → Upscaling -- Image generation → Style transfer → Optimization -- Audio generation → Transcription → Translation -- Video generation → Thumbnail extraction → Metadata generation - -### Human-in-the-loop - -Add review and approval steps to AI workflows: - -- Generate content → Human review → Publish -- AI moderation → Flagged content review → Decision -- Batch generation → Sample review → Approve batch -- Iterative refinement with human feedback +## Why Trigger.dev for AI media generation + +**Pay only for active compute, not AI inference time** + +Checkpoint-resume pauses during AI API calls. Generate content that takes minutes or hours without paying for idle inference time. 
+ +**No timeout limits for long generations** + +Handle generations that take minutes or hours without execution limits. Perfect for high-quality video synthesis and complex multi-modal workflows. + +**Human approval gates for brand safety** + +Add review steps before publishing AI-generated content. Pause workflows for human approval using waitpoint tokens. + +## Common workflows + +Here are some basic examples of AI media generation workflows: + + + + + Receive prompts and parameters + Call AI API (OpenAI, Replicate, Fal.ai) + Upscale, optimize, apply transformations + Upload to storage, update database + + + + + Receive batch of generation requests + Process hundreds in parallel with rate limiting + Validate outputs, filter failures + Store results, notify completion + + + + + Create initial content with AI + Apply style transfer or enhancement + Increase quality/resolution + Compress and format for delivery + + + + + Create AI content + Pause for human approval + Apply feedback if needed + Deploy approved content + + + diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index 290ab4c90a..f1787502f7 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -1,31 +1,18 @@ --- -title: "Media Processing" -description: "Build reliable media processing workflows for video transcoding, image processing, audio transformation, and more with Trigger.dev" +title: "Media processing workflows" +sidebarTitle: "Media processing" +description: "Learn how to use Trigger.dev for media processing including video transcoding, image optimization, audio transformation, and document conversion workflows" --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Media processing workflows require handling large files (videos, images, audio), long-running operations, and resource-intensive transformations. 
Build reliable media processing pipelines in TypeScript with automatic retries, progress tracking, and no timeout limits; perfect for video transcoding, image optimization, audio processing, and AI-powered media analysis. +Build media processing pipelines that handle large files and long-running operations. Process videos, images, audio, and documents with automatic retries, progress tracking, and no timeout limits. -## Media processing workflow implementation +## Featured examples -A typical media processing pipeline: - -1. **Trigger**: Upload webhook or S3 event -2. **Download**: Fetch source file (video, image, audio, document) -3. **Process**: Transcode, resize, compress, extract frames -4. **Upload**: Store results to S3/R2/storage -5. **Cleanup**: Update database, delete temp files - -Each step is durable and retryable—if FFmpeg processing fails, Trigger.dev automatically retries without re-downloading the source file thanks to [checkpoint-resume](/how-it-works#the-checkpoint-resume-system) and [idempotency](/idempotency). - -Trigger.dev is ideal for media processing because there are no [timeout limits](/runs/max-duration) (transcode multi-hour videos or run CPU-intensive operations indefinitely), [batchTriggerAndWait()](/triggering#yourtask-batchtriggerandwait) processes hundreds of files in parallel with [queue.concurrencyLimit](/queue-concurrency) controlling resource usage, [metadata](/runs/metadata) + [realtime](/realtime) stream processing progress to your frontend, and [wait.for()](/wait-for-token) enables human approval workflows for content moderation. - -## Media processing workflow examples - - + Process videos and upload results to R2 storage using FFmpeg. @@ -43,65 +30,60 @@ Trigger.dev is ideal for media processing because there are no [timeout limits]( > Convert documents to PDF using LibreOffice. - - Extract form data from PDFs using Python and PyMuPDF. 
- -## Production use cases - - - -Read how Icon uses Trigger.dev to process and generate thousands of videos per month for their AI-driven video creation platform. - - - - - -Read how Papermark process thousands of documents per month using Trigger.dev. - - - -## Common media processing patterns - -### Video Processing - -Transform and optimize video content at scale: - -- Transcode videos to multiple formats and resolutions -- Generate thumbnails and preview clips -- Add watermarks, captions, or overlays -- Extract audio tracks or video frames - -### Image Processing - -Optimize and transform images for any use case: - -- Resize and compress for web delivery -- Generate multiple variants (thumbnails, crops, formats) -- Apply filters, effects, or branding -- Convert between formats (PNG, JPEG, WebP) - -### Audio Processing - -Process and transform audio files: - -- Transcribe speech to text -- Convert between audio formats -- Extract audio from video files -- Apply audio effects and normalization - -### Document Processing - -Handle document transformation workflows: - -- Convert documents to PDF -- Extract text and form data -- Generate document previews -- OCR and text extraction +## Why Trigger.dev for media processing + +### Process multi-hour videos without timeouts + +Transcode videos, extract frames, or run CPU-intensive operations for hours. No execution time limits. + +### Stream progress to users in real-time + +Show processing status updating live in your UI. Users see exactly where encoding is and how long remains. + +### Parallel processing with resource control + +Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. 
+ +## Common workflows + +Here are some basic examples of media processing workflows: + + + + + Receive upload webhook or storage event + Fetch source video file + Convert to multiple formats/resolutions with FFmpeg + Store results to S3/R2/storage + Update database, delete temp files + + + + + Receive image from user or webhook + Resize, compress, apply filters + Generate thumbnails and multiple formats + Save to storage and update database + + + + + Fetch audio file from storage + Convert speech to text + Apply effects, normalize, convert format + Store results and notify completion + + + + + Get document via upload or webhook + Transform to PDF or extract text + Pull form data or run OCR + Save processed results + + + From f43468e5d864ca496564ce0a2535728413b63403 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 12:59:10 +0000 Subject: [PATCH 03/13] More improvements --- docs/guides/use-cases/data-processing-etl.mdx | 114 +++++++++--- docs/guides/use-cases/marketing.mdx | 112 ++++++++---- docs/guides/use-cases/media-generation.mdx | 98 ++++++++--- docs/guides/use-cases/media-processing.mdx | 165 ++++++++++++++---- 4 files changed, 369 insertions(+), 120 deletions(-) diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index 0a9c4c7609..934c8428f0 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -46,42 +46,98 @@ Process thousands of records simultaneously while respecting API rate limits. Sc Show row-by-row processing status updating live in your dashboard. Users see exactly where processing is and how long remains. -## Common workflows - -Here are some basic examples of data processing and ETL workflows: +## Example workflows + + Simple CSV import pipeline. Receives file upload, parses CSV rows, validates data, imports to database with progress tracking. + +
+ +```mermaid +graph TB + A[importCSV] --> B[parseCSVFile] + B --> C[validateRows] + C --> D[bulkInsertToDB] + D --> E[notifyCompletion] +``` + +
+
+ - - Pull from APIs, databases, S3, or web scraping - Clean, validate, enrich data - Write to warehouse, database, or storage - Track progress, handle failures - + **Coordinator pattern with parallel extraction**. Batch triggers parallel extraction from multiple sources (APIs, databases, S3), transforms and validates data, loads to data warehouse with monitoring. + +
+ +```mermaid +graph TB + A[runETLPipeline] --> B[coordinateExtraction] + B --> C[batchTriggerAndWait] + + C --> D[extractFromAPI] + C --> E[extractFromDatabase] + C --> F[extractFromS3] + + D --> G[transformData] + E --> G + F --> G + + G --> H[validateData] + H --> I[loadToWarehouse] +``` + +
+ - - Load target pages with headless browser - Pull content, links, structured data - Clean HTML, parse JSON, normalize data - Save to database or file storage - + **Coordinator pattern with browser automation**. Launches headless browsers in parallel to scrape multiple pages, extracts structured data, cleans and normalizes content, stores in database. + +
+ +```mermaid +graph TB + A[scrapeSite] --> B[coordinateScraping] + B --> C[batchTriggerAndWait] + + C --> D[scrapePage1] + C --> E[scrapePage2] + C --> F[scrapePageN] + + D --> G[cleanData] + E --> G + F --> G + + G --> H[normalizeData] + H --> I[storeInDatabase] +``` + +
+ - - Fetch records needing enrichment - Call external APIs in parallel batches - Check data quality and completeness - Write enriched data back to database - - - - - Receive file via webhook or storage event - Read CSV, JSON, XML, or binary format - Transform, validate, chunk large files - Bulk insert to database or data warehouse - + **Coordinator pattern with rate limiting**. Fetches records needing enrichment, batch triggers parallel API calls with configurable concurrency to respect rate limits, validates enriched data, updates database. + +
+ +```mermaid +graph TB + A[enrichRecords] --> B[fetchRecordsToEnrich] + B --> C[coordinateEnrichment] + C --> D[batchTriggerAndWait] + + D --> E[enrichRecord1] + D --> F[enrichRecord2] + D --> G[enrichRecordN] + + E --> H[validateEnrichedData] + F --> H + G --> H + + H --> I[updateDatabase] +``` + +
diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index e4cc478376..8bfd997e83 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -50,45 +50,93 @@ Messages send exactly once, even after retries. Personalized content isn't regen Process thousands in parallel while respecting rate limits. Send to entire segments without overwhelming APIs. -## Common workflows - -Here are some basic examples of marketing workflows: +## Example workflows - - - User signs up or performs action - Query database for targeting criteria - Pause for specified time (hours/days) - Deliver personalized message - Log engagement and trigger next step - + + Simple drip campaign. User signs up, waits specified delay, sends personalized email, tracks engagement. + +
+ +```mermaid +graph TB + A[startDripCampaign] --> B[fetchUserData] + B --> C[wait.for 24h] + C --> D[sendPersonalizedEmail] + D --> E[trackEngagement] +``` + +
- - - Cart add, page view, feature usage - Pull user profile and preferences - Create personalized message - Send via email, SMS, or in-app - + + + **Router pattern with delay orchestration**. User action triggers campaign, router selects channel based on preferences (email/SMS/push), coordinates multi-day sequence with delays between messages, tracks engagement across channels. + +
+
+```mermaid
+graph TB
+    A[startCampaign] --> B[fetchUserProfile]
+    B --> C[selectChannel]
+    C --> D{Preferred<br/>Channel?}
+
+    D -->|Email| E[sendEmail1]
+    D -->|SMS| F[sendSMS1]
+    D -->|Push| G[sendPush1]
+
+    E --> H[wait.for 2d]
+    F --> H
+    G --> H
+
+    H --> I[sendFollowUp]
+    I --> J[trackConversion]
+```
+
+
+ - - Generate AI content or asset - Pause for approval workflow - Apply feedback if needed - Mark ready for publishing - Deploy to channels - + **Supervisor pattern with approval gate**. Generates AI marketing content (images, copy, assets), pauses with wait.for for human review, applies revisions if needed, publishes to channels after approval. + +
+
+```mermaid
+graph TB
+    A[createCampaignAssets] --> B[generateAIContent]
+    B --> C[wait.forToken approval]
+    C --> D{Approved?}
+
+    D -->|Yes| E[publishToChannels]
+    D -->|Needs revision| F[applyFeedback]
+    F --> B
+```
+
+
+ - - User completes survey - Add context from CRM - Score and categorize - Save insights - Send personalized response - + **Coordinator pattern with enrichment**. User completes survey, batch triggers parallel enrichment from CRM/analytics, analyzes and scores responses, updates customer profiles, triggers personalized follow-up campaigns. + +
+ +```mermaid +graph TB + A[processSurveyResponse] --> B[coordinateEnrichment] + B --> C[batchTriggerAndWait] + + C --> D[fetchCRMData] + C --> E[fetchAnalytics] + C --> F[fetchBehaviorData] + + D --> G[analyzeAndScore] + E --> G + F --> G + + G --> H[updateCRMProfile] + H --> I[triggerFollowUp] +``` + +
diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index 3955632c1c..7fa3d7be9e 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -46,42 +46,84 @@ Handle generations that take minutes or hours without execution limits. Perfect Add review steps before publishing AI-generated content. Pause workflows for human approval using waitpoint tokens. -## Common workflows - -Here are some basic examples of AI media generation workflows: +## Example workflows - - - Receive prompts and parameters - Call AI API (OpenAI, Replicate, Fal.ai) - Upscale, optimize, apply transformations - Upload to storage, update database - + + Simple AI image generation. Receives prompt and parameters, calls OpenAI DALL·E 3, post-processes result, uploads to storage. + +
+ +```mermaid +graph TB + A[generateImage] --> B[callDALLE3] + B --> C[optimizeImage] + C --> D[uploadToStorage] + D --> E[updateDatabase] +``` + +
+ - - Receive batch of generation requests - Process hundreds in parallel with rate limiting - Validate outputs, filter failures - Store results, notify completion - + **Coordinator pattern with rate limiting**. Receives batch of generation requests, coordinates parallel processing with configurable concurrency to respect API rate limits, validates outputs, stores results. + +
+ +```mermaid +graph TB + A[processBatch] --> B[coordinateGeneration] + B --> C[batchTriggerAndWait] + + C --> D[generateImage1] + C --> E[generateImage2] + C --> F[generateImageN] + + D --> G[validateResults] + E --> G + F --> G + + G --> H[storeResults] + H --> I[notifyCompletion] +``` + +
+ - - Create initial content with AI - Apply style transfer or enhancement - Increase quality/resolution - Compress and format for delivery - + **Coordinator pattern with sequential processing**. Generates initial content with AI, applies style transfer or enhancement, upscales resolution, optimizes and compresses for delivery. + +
+ +```mermaid +graph TB + A[processCreative] --> B[generateWithAI] + B --> C[applyStyleTransfer] + C --> D[upscaleResolution] + D --> E[optimizeAndCompress] + E --> F[uploadToStorage] +``` + +
+ - - Create AI content - Pause for human approval - Apply feedback if needed - Deploy approved content - + **Supervisor pattern with approval gate**. Generates AI content, pauses execution with wait.for to allow human review, applies feedback if needed, publishes approved content. + +
+
+```mermaid
+graph TB
+    A[generateContent] --> B[createWithAI]
+    B --> C[wait.forToken approval]
+    C --> D{Approved?}
+
+    D -->|Yes| E[publishContent]
+    D -->|Needs revision| F[applyFeedback]
+    F --> B
+```
+
+
diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index f1787502f7..ed5602fa3d 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -46,43 +46,146 @@ Show processing status updating live in your UI. Users see exactly where encodin Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. -## Common workflows - -Here are some basic examples of media processing workflows: +## Example workflows - - - Receive upload webhook or storage event - Fetch source video file - Convert to multiple formats/resolutions with FFmpeg - Store results to S3/R2/storage - Update database, delete temp files - + + Simple video transcoding pipeline. Downloads video from storage, transcodes to multiple formats, extracts thumbnail, uploads results. + +
+ +```mermaid +graph TB + A[processVideo] --> B[downloadFromStorage] + B --> C[transcodeToHD] + C --> D[transcodeToSD] + D --> E[extractThumbnail] + E --> F[uploadToStorage] +``` + +
- - - Receive image from user or webhook - Resize, compress, apply filters - Generate thumbnails and multiple formats - Save to storage and update database - + + + **Router + Coordinator pattern**. Analyzes video metadata to determine source resolution, routes to appropriate transcoding preset, batch triggers parallel post-processing for thumbnails, preview clips, and chapter detection. + +
+ +```mermaid +graph TB + A[processVideoUpload] --> B[analyzeMetadata] + B --> C{Source
Resolution?} + + C -->|4K Source| D[transcode4K] + C -->|HD Source| E[transcodeHD] + C -->|SD Source| F[transcodeSD] + + D --> G[coordinatePostProcessing] + E --> G + F --> G + + G --> H[batchTriggerAndWait] + H --> I[extractThumbnails] + H --> J[generatePreview] + H --> K[detectChapters] + + I --> L[uploadToStorage] + J --> L + K --> L + + L --> M[notifyComplete] +``` + +
+
+ + + **Router + Coordinator pattern**. Analyzes image content to detect type, routes to specialized processing (background removal for products, face detection for portraits, scene analysis for landscapes), upscales with AI, batch triggers parallel variant generation. + +
+ +```mermaid +graph TB + A[processImageUpload] --> B[analyzeContent] + B --> C{Content
Type?} + + C -->|Product| D[removeBackground] + C -->|Portrait| E[detectFaces] + C -->|Landscape| F[analyzeScene] + + D --> G[upscaleWithAI] + E --> G + F --> G + + G --> H[batchTriggerAndWait] + H --> I[generateWebP] + H --> J[generateThumbnails] + H --> K[generateSocialCrops] + + I --> L[uploadToStorage] + J --> L + K --> L +``` + +
- - - Fetch audio file from storage - Convert speech to text - Apply effects, normalize, convert format - Store results and notify completion - + + + **Coordinator pattern**. Pre-processes raw audio with noise reduction and speaker diarization, batch triggers parallel tasks for transcription (Deepgram), audio enhancement, and chapter detection, aggregates results to generate show notes and publish. + +
+ +```mermaid +graph TB + A[processAudioUpload] --> B[cleanAudio] + B --> C[coordinateProcessing] + + C --> D[batchTriggerAndWait] + D --> E[transcribeWithDeepgram] + D --> F[enhanceAudio] + D --> G[detectChapters] + + E --> H[generateShowNotes] + F --> H + G --> H + + H --> I[publishToPlatforms] +``` + +
+ - - Get document via upload or webhook - Transform to PDF or extract text - Pull form data or run OCR - Save processed results - + **Router pattern with human-in-the-loop**. Detects file type and routes to appropriate processor, classifies document with AI to determine type (invoice/contract/receipt), extracts structured data fields, optionally pauses with wait.for for human approval. + +
+ +```mermaid +graph TB + A[processDocumentUpload] --> B[detectFileType] + + B -->|PDF| C[extractText] + B -->|Word/Excel| D[convertToPDF] + B -->|Image| E[runOCR] + + C --> F[classifyDocument] + D --> F + E --> F + + F -->|Invoice| G[extractLineItems] + F -->|Contract| H[extractClauses] + F -->|Receipt| I[extractExpenses] + + G --> J{Needs
Review?} + H --> J + I --> J + + J -->|Yes| K[wait.for approval] + J -->|No| L[processAndIntegrate] + K --> L +``` + +
From 62b3c9d36cdab9283641d3833a96f7ac6c8123a1 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 14:02:57 +0000 Subject: [PATCH 04/13] Improved diagrams --- docs/guides/use-cases/data-processing-etl.mdx | 12 +++------ docs/guides/use-cases/marketing.mdx | 27 +++++++++++++------ docs/guides/use-cases/media-generation.mdx | 12 +++------ docs/guides/use-cases/media-processing.mdx | 12 +++------ 4 files changed, 28 insertions(+), 35 deletions(-) diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index 934c8428f0..e6a0b151a6 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -34,17 +34,11 @@ Build data pipelines that process large datasets without timeouts. Handle stream ## Why Trigger.dev for data processing -**Process datasets for hours without timeouts** +**Process datasets for hours without timeouts:** Handle multi-hour transformations, large file processing, or complete database exports. No execution time limits. -Handle multi-hour transformations, large file processing, or complete database exports. No execution time limits. +**Parallel processing with built-in rate limiting:** Process thousands of records simultaneously while respecting API rate limits. Scale efficiently without overwhelming downstream services. -**Parallel processing with built-in rate limiting** - -Process thousands of records simultaneously while respecting API rate limits. Scale efficiently without overwhelming downstream services. - -**Stream progress to your users in real-time** - -Show row-by-row processing status updating live in your dashboard. Users see exactly where processing is and how long remains. +**Stream progress to your users in real-time:** Show row-by-row processing status updating live in your dashboard. Users see exactly where processing is and how long remains. 
## Example workflows diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index 8bfd997e83..cf8da95d5a 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -1,6 +1,6 @@ --- title: "Marketing workflows" -sidebarTitle: "Marketing workflows" +sidebarTitle: "Marketing" description: "Learn how to use Trigger.dev for marketing workflows, including drip campaigns, behavioral triggers, personalization engines, and AI-powered content workflows" --- @@ -38,17 +38,28 @@ Build marketing workflows from drip campaigns to real-time personalization. Hand ## Why Trigger.dev for marketing -**Delays without idle costs** +**Delays without idle costs:** Wait hours or weeks between steps. Pay only for active compute, not wait time. Perfect for drip campaigns and scheduled follow-ups. -Wait hours or weeks between steps. Pay only for active compute, not wait time. Perfect for drip campaigns and scheduled follow-ups. +**Guaranteed delivery:** Messages send exactly once, even after retries. Personalized content isn't regenerated on failure. -**Guaranteed delivery** +**Scale without limits:** Process thousands in parallel while respecting rate limits. Send to entire segments without overwhelming APIs. -Messages send exactly once, even after retries. Personalized content isn't regenerated on failure. +## Production use cases -**Scale without limits** - -Process thousands in parallel while respecting rate limits. Send to entire segments without overwhelming APIs. 
+ + + [1-2 sentence description of what they built and the impact/result] + + + [1-2 sentence description of what they built and the impact/result] + + + [1-2 sentence description of what they built and the impact/result] + + + [1-2 sentence description of what they built and the impact/result] + + ## Example workflows diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index 7fa3d7be9e..a4ad497a39 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -34,17 +34,11 @@ Build AI media generation pipelines that handle unpredictable API latencies and ## Why Trigger.dev for AI media generation -**Pay only for active compute, not AI inference time** +**Pay only for active compute, not AI inference time:** Checkpoint-resume pauses during AI API calls. Generate content that takes minutes or hours without paying for idle inference time. -Checkpoint-resume pauses during AI API calls. Generate content that takes minutes or hours without paying for idle inference time. +**No timeout limits for long generations:** Handle generations that take minutes or hours without execution limits. Perfect for high-quality video synthesis and complex multi-modal workflows. -**No timeout limits for long generations** - -Handle generations that take minutes or hours without execution limits. Perfect for high-quality video synthesis and complex multi-modal workflows. - -**Human approval gates for brand safety** - -Add review steps before publishing AI-generated content. Pause workflows for human approval using waitpoint tokens. +**Human approval gates for brand safety:** Add review steps before publishing AI-generated content. Pause workflows for human approval using waitpoint tokens. 
## Example workflows diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index ed5602fa3d..716c3afe2b 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -34,17 +34,11 @@ Build media processing pipelines that handle large files and long-running operat ## Why Trigger.dev for media processing -### Process multi-hour videos without timeouts +**Process multi-hour videos without timeouts:** Transcode videos, extract frames, or run CPU-intensive operations for hours. No execution time limits. -Transcode videos, extract frames, or run CPU-intensive operations for hours. No execution time limits. +**Stream progress to users in real-time:** Show processing status updating live in your UI. Users see exactly where encoding is and how long remains. -### Stream progress to users in real-time - -Show processing status updating live in your UI. Users see exactly where encoding is and how long remains. - -### Parallel processing with resource control - -Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. +**Parallel processing with resource control:** Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. 
## Example workflows From 1c147276d75f13fd5a847c23e1b65500c9396eb6 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 14:03:04 +0000 Subject: [PATCH 05/13] Added overview page --- docs/docs.json | 1 + docs/guides/use-cases/overview.mdx | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 100644 docs/guides/use-cases/overview.mdx diff --git a/docs/docs.json b/docs/docs.json index 5339d3a556..aeb5650ae9 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -336,6 +336,7 @@ { "group": "Use cases", "pages": [ + "guides/use-cases/overview", "guides/use-cases/data-processing-etl", "guides/use-cases/media-generation", "guides/use-cases/media-processing", diff --git a/docs/guides/use-cases/overview.mdx b/docs/guides/use-cases/overview.mdx new file mode 100644 index 0000000000..8bb9477abb --- /dev/null +++ b/docs/guides/use-cases/overview.mdx @@ -0,0 +1,11 @@ +--- +title: "Use cases" +sidebarTitle: "Overview" +description: "Explore common use cases for Trigger.dev including data processing, media workflows, marketing automation, and AI generation" +--- + +import UseCasesCards from "/snippets/use-cases-cards.mdx"; + +Trigger.dev handles workflows that traditional platforms struggle with: long-running operations, unpredictable API latencies, multi-hour processing, and complex orchestration patterns. Our platform provides no timeout limits, automatic retries, and real-time progress tracking built in. 
+ + From 829eef2d7202be5f6fc81610920dd04221c8de7a Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 14:55:36 +0000 Subject: [PATCH 06/13] More copy + diagram updates --- docs/guides/use-cases/data-processing-etl.mdx | 35 +++++++++++--- docs/guides/use-cases/marketing.mdx | 31 +++++------- docs/guides/use-cases/media-generation.mdx | 42 +++++++++++----- docs/guides/use-cases/media-processing.mdx | 18 +++---- docs/snippets/use-cases-cards.mdx | 48 +++++-------------- 5 files changed, 92 insertions(+), 82 deletions(-) diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index e6a0b151a6..197915edcc 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -1,14 +1,14 @@ --- title: "Data processing & ETL workflows" sidebarTitle: "Data processing & ETL" -description: "Learn how to use Trigger.dev for data processing and ETL including web scraping, database synchronization, batch enrichment, and streaming analytics workflows" +description: "Learn how to use Trigger.dev for data processing and ETL (Extract, Transform, Load), including web scraping, database synchronization, batch enrichment and more." --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Build data pipelines that process large datasets without timeouts. Handle streaming analytics, batch enrichment, web scraping, database sync, and file processing with automatic retries and progress tracking. +Build complex data pipelines that process large datasets without timeouts. Handle streaming analytics, batch enrichment, web scraping, database sync, and file processing with automatic retries and progress tracking. ## Featured examples @@ -24,15 +24,15 @@ Build data pipelines that process large datasets without timeouts. Handle stream Scrape websites using BrowserBase and Puppeteer. - Run CRUD operations on Supabase database tables. 
+ Trigger tasks from Supabase database webhooks. -## Why Trigger.dev for data processing +## Benefits of using Trigger.dev for data processing & ETL workflows **Process datasets for hours without timeouts:** Handle multi-hour transformations, large file processing, or complete database exports. No execution time limits. @@ -40,7 +40,28 @@ Build data pipelines that process large datasets without timeouts. Handle stream **Stream progress to your users in real-time:** Show row-by-row processing status updating live in your dashboard. Users see exactly where processing is and how long remains. -## Example workflows +## Production use cases + + + + +Read how MagicSchool AI uses Trigger.dev to generate insights from millions of student interactions. + + + + + +Read how Comp AI uses Trigger.dev to automate evidence collection at scale, powering their open source, AI-driven compliance platform. + + + + +Read how Midday use Trigger.dev to sync large volumes of bank transactions in their financial management platform. + + + + +## Example worfklow patterns diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index cf8da95d5a..a32d55db6a 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -8,7 +8,7 @@ import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Build marketing workflows from drip campaigns to real-time personalization. Handle multi-day sequences, behavioral triggers, dynamic content generation, and live analytics, all with automatic retries and progress tracking. +Build marketing workflows from drip campaigns to orchestrating multi-channel campaigns. Handle multi-day sequences, behavioral triggers, dynamic content generation, and live analytics, all with automatic retries and progress tracking. ## Featured examples @@ -25,18 +25,18 @@ Build marketing workflows from drip campaigns to real-time personalization. 
Hand icon="book" href="/guides/example-projects/product-image-generator" > - Transform product photos into professional marketing images with AI. + Transform product photos into professional marketing images using Replicate. - Add approval gates to workflows using waitpoint tokens. + Approve marketing content using a human-in-the-loop workflow. -## Why Trigger.dev for marketing +## Benefits of using Trigger.dev for marketing workflows **Delays without idle costs:** Wait hours or weeks between steps. Pay only for active compute, not wait time. Perfect for drip campaigns and scheduled follow-ups. @@ -46,25 +46,16 @@ Build marketing workflows from drip campaigns to real-time personalization. Hand ## Production use cases - - - [1-2 sentence description of what they built and the impact/result] - - - [1-2 sentence description of what they built and the impact/result] - - - [1-2 sentence description of what they built and the impact/result] - - - [1-2 sentence description of what they built and the impact/result] - - + + +Read how Icon uses Trigger.dev to process and generate thousands of videos per month for their AI-driven video creation platform. + + -## Example workflows +## Example worfklow patterns - + Simple drip campaign. User signs up, waits specified delay, sends personalized email, tracks engagement.
diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index a4ad497a39..db8321e47a 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -1,6 +1,6 @@ --- -title: "AI-powered media generation workflows" -sidebarTitle: "AI-powered media generation" +title: "AI media generation workflows" +sidebarTitle: "AI media generation" description: "Learn how to use Trigger.dev for AI media generation including image creation, video synthesis, audio generation, and multi-modal content workflows" --- @@ -13,9 +13,6 @@ Build AI media generation pipelines that handle unpredictable API latencies and ## Featured examples - - Generate images from text prompts using OpenAI's DALL·E 3. - Generate memes with DALL·E 3 and add human approval steps. + + Generate images from text prompts using the Vercel AI SDK. + -## Why Trigger.dev for AI media generation +## Benefits of using Trigger.dev for AI media generation workflows **Pay only for active compute, not AI inference time:** Checkpoint-resume pauses during AI API calls. Generate content that takes minutes or hours without paying for idle inference time. @@ -40,7 +44,24 @@ Build AI media generation pipelines that handle unpredictable API latencies and **Human approval gates for brand safety:** Add review steps before publishing AI-generated content. Pause workflows for human approval using waitpoint tokens. -## Example workflows +## Production use cases + + + + + +Read how Icon uses Trigger.dev to process and generate thousands of videos per month for their AI-driven video creation platform. + + + + + +Read how Papermark process thousands of documents per month using Trigger.dev. 
+
+</Card>
+
+</CardGroup>
+
+## Example workflow patterns
 
 <AccordionGroup>
   <Accordion title="Simple: Generate and store AI images">
@@ -50,10 +71,9 @@ Build AI media generation pipelines that handle unpredictable API latencies and
 
 ```mermaid
 graph TB
-  A[generateImage] --> B[callDALLE3]
-  B --> C[optimizeImage]
-  C --> D[uploadToStorage]
-  D --> E[updateDatabase]
+  A[generateImage] --> B[optimizeImage]
+  B --> C[uploadToStorage]
+  C --> D[updateDatabase]
 ```
 
diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index 716c3afe2b..05b1120735 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -1,7 +1,7 @@ --- title: "Media processing workflows" sidebarTitle: "Media processing" -description: "Learn how to use Trigger.dev for media processing including video transcoding, image optimization, audio transformation, and document conversion workflows" +description: "Learn how to use Trigger.dev for media processing including video transcoding, image optimization, audio transformation, and document conversion." --- import UseCasesCards from "/snippets/use-cases-cards.mdx"; @@ -17,11 +17,11 @@ Build media processing pipelines that handle large files and long-running operat Process videos and upload results to R2 storage using FFmpeg. - Transcribe audio files using Deepgram's speech recognition API. + Transform product photos into professional marketing images using Replicate. -## Why Trigger.dev for media processing +## Benefits of using Trigger.dev for media processing workflows **Process multi-hour videos without timeouts:** Transcode videos, extract frames, or run CPU-intensive operations for hours. No execution time limits. @@ -40,7 +40,7 @@ Build media processing pipelines that handle large files and long-running operat **Parallel processing with resource control:** Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. -## Example workflows +## Example worfklow patterns @@ -60,7 +60,7 @@ graph TB - + **Router + Coordinator pattern**. Analyzes video metadata to determine source resolution, routes to appropriate transcoding preset, batch triggers parallel post-processing for thumbnails, preview clips, and chapter detection.
@@ -93,7 +93,7 @@ graph TB
- + **Router + Coordinator pattern**. Analyzes image content to detect type, routes to specialized processing (background removal for products, face detection for portraits, scene analysis for landscapes), upscales with AI, batch triggers parallel variant generation.
@@ -149,7 +149,7 @@ graph TB
- + **Router pattern with human-in-the-loop**. Detects file type and routes to appropriate processor, classifies document with AI to determine type (invoice/contract/receipt), extracts structured data fields, optionally pauses with wait.for for human approval.
diff --git a/docs/snippets/use-cases-cards.mdx b/docs/snippets/use-cases-cards.mdx index 2ea6744461..b43f8e5c9d 100644 --- a/docs/snippets/use-cases-cards.mdx +++ b/docs/snippets/use-cases-cards.mdx @@ -1,46 +1,24 @@ -## Use cases +## Featured use cases - Build intelligent workflows that combine AI processing with human oversight. From research - agents to customer support systems and content moderation pipelines. + Build complex data pipelines that process large datasets without timeouts. - - Transform and move data reliably at any scale. Whether streaming real-time analytics, enriching - customer records, or synchronizing large datasets. - - - Create scalable content workflows from single assets to thousands. Generate PDFs, videos, or - social media content with progress tracking and approval steps. + + Batch process videos, images, audio, and documents with no execution time limits. - Generate images, videos, and audio with AI models. Handle unpredictable API latencies, - manage rate limits, and stream progress in real-time. + Generate images, videos, audio, documents and other media using AI models. - - Process audio, video, and images efficiently with adaptive workflows. From real-time - transcription to batch optimization. - - - Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, - personalization engines, and analytics workflows. + + Build drip campaigns, create marketing content, and orchestrate multi-channel campaigns. 
From 77ab65b306e2d143e81d271e4eebeebca918dba0 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 14:57:09 +0000 Subject: [PATCH 07/13] Improved diagram titles --- docs/guides/use-cases/data-processing-etl.mdx | 8 ++++---- docs/guides/use-cases/marketing.mdx | 8 ++++---- docs/guides/use-cases/media-generation.mdx | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index 197915edcc..806eb31a3b 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -64,7 +64,7 @@ Read how Midday use Trigger.dev to sync large volumes of bank transactions in th ## Example worfklow patterns - + Simple CSV import pipeline. Receives file upload, parses CSV rows, validates data, imports to database with progress tracking.
@@ -80,7 +80,7 @@ graph TB
- + **Coordinator pattern with parallel extraction**. Batch triggers parallel extraction from multiple sources (APIs, databases, S3), transforms and validates data, loads to data warehouse with monitoring.
@@ -105,7 +105,7 @@ graph TB
- + **Coordinator pattern with browser automation**. Launches headless browsers in parallel to scrape multiple pages, extracts structured data, cleans and normalizes content, stores in database.
@@ -130,7 +130,7 @@ graph TB
- + **Coordinator pattern with rate limiting**. Fetches records needing enrichment, batch triggers parallel API calls with configurable concurrency to respect rate limits, validates enriched data, updates database.
diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index a32d55db6a..a49a3030de 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -71,7 +71,7 @@ graph TB
- + **Router pattern with delay orchestration**. User action triggers campaign, router selects channel based on preferences (email/SMS/push), coordinates multi-day sequence with delays between messages, tracks engagement across channels.
@@ -84,7 +84,7 @@ graph TB D -->|Email| E[sendEmail1] D -->|SMS| F[sendSMS1] - D -->|Push| G[sendPush1] + D -->|Social media post| G[postToX1] E --> H[wait.for 2d] F --> H @@ -97,7 +97,7 @@ graph TB
- + **Supervisor pattern with approval gate**. Generates AI marketing content (images, copy, assets), pauses with wait.for for human review, applies revisions if needed, publishes to channels after approval.
@@ -116,7 +116,7 @@ graph TB
- + **Coordinator pattern with enrichment**. User completes survey, batch triggers parallel enrichment from CRM/analytics, analyzes and scores responses, updates customer profiles, triggers personalized follow-up campaigns.
diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index db8321e47a..cdaf2ae27b 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -64,7 +64,7 @@ Read how Papermark process thousands of documents per month using Trigger.dev. ## Example worfklow patterns - + Simple AI image generation. Receives prompt and parameters, calls OpenAI DALL·E 3, post-processes result, uploads to storage.
@@ -79,7 +79,7 @@ graph TB
- + **Coordinator pattern with rate limiting**. Receives batch of generation requests, coordinates parallel processing with configurable concurrency to respect API rate limits, validates outputs, stores results.
@@ -104,7 +104,7 @@ graph TB
- + **Coordinator pattern with sequential processing**. Generates initial content with AI, applies style transfer or enhancement, upscales resolution, optimizes and compresses for delivery.
@@ -121,7 +121,7 @@ graph TB
- + **Supervisor pattern with approval gate**. Generates AI content, pauses execution with wait.for to allow human review, applies feedback if needed, publishes approved content.
From 59ac4f730121d59dcb46ca5a0ef951d19369e6b6 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 15:07:12 +0000 Subject: [PATCH 08/13] Further diagram improvements --- docs/guides/use-cases/marketing.mdx | 12 +++++--- docs/guides/use-cases/media-generation.mdx | 36 +++++++++++----------- docs/guides/use-cases/media-processing.mdx | 17 ++++++---- 3 files changed, 36 insertions(+), 29 deletions(-) diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index a49a3030de..3a9d4126cb 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -8,7 +8,7 @@ import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Overview -Build marketing workflows from drip campaigns to orchestrating multi-channel campaigns. Handle multi-day sequences, behavioral triggers, dynamic content generation, and live analytics, all with automatic retries and progress tracking. +Build marketing workflows from email drip sequences to orchestrating full multi-channel campaigns. Handle multi-day sequences, behavioral triggers, dynamic content generation, and build live analytics dashboards. ## Featured examples @@ -62,10 +62,12 @@ Read how Icon uses Trigger.dev to process and generate thousands of videos per m ```mermaid graph TB - A[startDripCampaign] --> B[fetchUserData] + A[userCreateAccount] --> B[sendWelcomeEmail] B --> C[wait.for 24h] - C --> D[sendPersonalizedEmail] - D --> E[trackEngagement] + C --> D[sendProductTipsEmail] + D --> E[wait.for 7d] + E --> F[sendFeedbackEmail] + ```
@@ -84,7 +86,7 @@ graph TB D -->|Email| E[sendEmail1] D -->|SMS| F[sendSMS1] - D -->|Social media post| G[postToX1] + D -->|Push| G[sendPush1] E --> H[wait.for 2d] F --> H diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index cdaf2ae27b..6c006aecd8 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -64,6 +64,24 @@ Read how Papermark process thousands of documents per month using Trigger.dev. ## Example worfklow patterns + + **Supervisor pattern with approval gate**. Generates AI content, pauses execution with wait.forToken to allow human review, applies feedback if needed, publishes approved content. + +
+ +```mermaid +graph TB + A[generateContent] --> B[createWithAI] + B --> C[wait.forToken approval] + C --> D{Approved?} + + D -->|Yes| E[publishContent] + D -->|Needs revision| F[applyFeedback] + F --> B +``` + +
+
Simple AI image generation. Receives prompt and parameters, calls OpenAI DALL·E 3, post-processes result, uploads to storage. @@ -121,24 +139,6 @@ graph TB
- - **Supervisor pattern with approval gate**. Generates AI content, pauses execution with wait.for to allow human review, applies feedback if needed, publishes approved content. - -
- -```mermaid -graph TB - A[generateContent] --> B[createWithAI] - B --> C[wait.for approval] - C --> D{Approved?} - - D -->|Yes| E[publishContent] - D -->|Needs revision| F[applyFeedback] - F --> B -``` - -
-
diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index 05b1120735..d30cd99a4f 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -44,17 +44,22 @@ Build media processing pipelines that handle large files and long-running operat - Simple video transcoding pipeline. Downloads video from storage, transcodes to multiple formats, extracts thumbnail, uploads results. + Simple video transcoding pipeline. Downloads video from storage, batch triggers parallel transcoding to multiple formats and thumbnail extraction, uploads all results.
```mermaid graph TB A[processVideo] --> B[downloadFromStorage] - B --> C[transcodeToHD] - C --> D[transcodeToSD] - D --> E[extractThumbnail] - E --> F[uploadToStorage] + B --> C[batchTriggerAndWait] + + C --> D[transcodeToHD] + C --> E[transcodeToSD] + C --> F[extractThumbnail] + + D --> G[uploadToStorage] + E --> G + F --> G ```
@@ -150,7 +155,7 @@ graph TB
- **Router pattern with human-in-the-loop**. Detects file type and routes to appropriate processor, classifies document with AI to determine type (invoice/contract/receipt), extracts structured data fields, optionally pauses with wait.for for human approval. + **Router pattern with human-in-the-loop**. Detects file type and routes to appropriate processor, classifies document with AI to determine type (invoice/contract/receipt), extracts structured data fields, optionally pauses with wait.forToken for human approval.
From 001e09493ff2ba3be4c50ae0651ddb43e22ba16d Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 15:18:21 +0000 Subject: [PATCH 09/13] Corrected workflow --- docs/guides/use-cases/marketing.mdx | 4 ++-- docs/guides/use-cases/media-processing.mdx | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index 3a9d4126cb..dc2545952d 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -100,14 +100,14 @@ graph TB - **Supervisor pattern with approval gate**. Generates AI marketing content (images, copy, assets), pauses with wait.for for human review, applies revisions if needed, publishes to channels after approval. + **Supervisor pattern with approval gate**. Generates AI marketing content (images, copy, assets), pauses with wait.forToken for human review, applies revisions if needed, publishes to channels after approval.
```mermaid graph TB A[createCampaignAssets] --> B[generateAIContent] - B --> C[wait.for approval] + B --> C[wait.forToken approval] C --> D{Approved?} D -->|Yes| E[publishToChannels] diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index d30cd99a4f..d0ab5520e7 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -179,7 +179,7 @@ graph TB H --> J I --> J - J -->|Yes| K[wait.for approval] + J -->|Yes| K[wait.forToken approval] J -->|No| L[processAndIntegrate] K --> L ``` From ce7e7ad95e0d17c7fb401efc7c8e3f6f5c5018c7 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 17:16:54 +0000 Subject: [PATCH 10/13] Updated copy --- docs/guides/use-cases/marketing.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index dc2545952d..3f1f879a7e 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -38,7 +38,7 @@ Build marketing workflows from email drip sequences to orchestrating full multi- ## Benefits of using Trigger.dev for marketing workflows -**Delays without idle costs:** Wait hours or weeks between steps. Pay only for active compute, not wait time. Perfect for drip campaigns and scheduled follow-ups. +**Delays without idle costs:** Wait hours or weeks between steps. Waits over 5 seconds are automatically checkpointed and don't count towards compute usage. Perfect for drip campaigns and scheduled follow-ups. **Guaranteed delivery:** Messages send exactly once, even after retries. Personalized content isn't regenerated on failure. 
From 74f71391ff1b819a1f9deccc29c95db7ba6a4224 Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 17:49:40 +0000 Subject: [PATCH 11/13] Typos --- docs/guides/use-cases/data-processing-etl.mdx | 2 +- docs/guides/use-cases/marketing.mdx | 2 +- docs/guides/use-cases/media-generation.mdx | 2 +- docs/guides/use-cases/media-processing.mdx | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/guides/use-cases/data-processing-etl.mdx b/docs/guides/use-cases/data-processing-etl.mdx index 806eb31a3b..f6bfdd6581 100644 --- a/docs/guides/use-cases/data-processing-etl.mdx +++ b/docs/guides/use-cases/data-processing-etl.mdx @@ -61,7 +61,7 @@ Read how Midday use Trigger.dev to sync large volumes of bank transactions in th -## Example worfklow patterns +## Example workflow patterns diff --git a/docs/guides/use-cases/marketing.mdx b/docs/guides/use-cases/marketing.mdx index 3f1f879a7e..83d08d95d9 100644 --- a/docs/guides/use-cases/marketing.mdx +++ b/docs/guides/use-cases/marketing.mdx @@ -52,7 +52,7 @@ Read how Icon uses Trigger.dev to process and generate thousands of videos per m -## Example worfklow patterns +## Example workflow patterns diff --git a/docs/guides/use-cases/media-generation.mdx b/docs/guides/use-cases/media-generation.mdx index 6c006aecd8..919440df5f 100644 --- a/docs/guides/use-cases/media-generation.mdx +++ b/docs/guides/use-cases/media-generation.mdx @@ -61,7 +61,7 @@ Read how Papermark process thousands of documents per month using Trigger.dev. 
-## Example worfklow patterns +## Example workflow patterns diff --git a/docs/guides/use-cases/media-processing.mdx b/docs/guides/use-cases/media-processing.mdx index d0ab5520e7..e0f6aa8719 100644 --- a/docs/guides/use-cases/media-processing.mdx +++ b/docs/guides/use-cases/media-processing.mdx @@ -40,7 +40,7 @@ Build media processing pipelines that handle large files and long-running operat **Parallel processing with resource control:** Process hundreds of files simultaneously with configurable concurrency limits. Control resource usage without overwhelming infrastructure. -## Example worfklow patterns +## Example workflow patterns From 9e64af1cde212c30f6d469db7851b44fed0d0a8f Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Wed, 29 Oct 2025 18:02:00 +0000 Subject: [PATCH 12/13] Updated intro file --- docs/guides/introduction.mdx | 37 +++--------------------------------- 1 file changed, 3 insertions(+), 34 deletions(-) diff --git a/docs/guides/introduction.mdx b/docs/guides/introduction.mdx index 2af52dc13b..13d6f96431 100644 --- a/docs/guides/introduction.mdx +++ b/docs/guides/introduction.mdx @@ -15,40 +15,7 @@ mode: "center" -## Use cases - -Explore comprehensive guides for common use cases and workflows: - - - - Transform and move data reliably at any scale. Whether streaming real-time analytics, enriching - customer records, or synchronizing large datasets. - - - Create scalable content workflows from single assets to thousands. Generate PDFs, videos, or - social media content with progress tracking and approval steps. - - - Process audio, video, and images efficiently with adaptive workflows. From real-time - transcription to batch optimization. - - - Orchestrate campaigns that respond dynamically to user behavior. Build drip sequences, - personalization engines, and analytics workflows. 
- - +import UseCasesCards from "/snippets/use-cases-cards.mdx"; ## Guides @@ -74,6 +41,8 @@ Get set up fast using our detailed walk-through guides. | [Using webhooks in Next.js](/guides/frameworks/nextjs-webhooks) | Trigger tasks from a webhook in Next.js | | [Using webhooks in Remix](/guides/frameworks/remix-webhooks) | Trigger tasks from a webhook in Remix | + + ## Example projects Example projects are full projects with example repos you can fork and use. These are a great way of learning how to use Trigger.dev in your projects. From 8184ea288ac0f5d45285193069ddff95bcdc953e Mon Sep 17 00:00:00 2001 From: D-K-P <8297864+D-K-P@users.noreply.github.com> Date: Thu, 30 Oct 2025 09:18:09 +0000 Subject: [PATCH 13/13] Reverted aiRunFilterService.server.ts --- .../v3/services/aiRunFilterService.server.ts | 60 +++++++------------ 1 file changed, 23 insertions(+), 37 deletions(-) diff --git a/apps/webapp/app/v3/services/aiRunFilterService.server.ts b/apps/webapp/app/v3/services/aiRunFilterService.server.ts index 0dcab5c04e..4ce12b9455 100644 --- a/apps/webapp/app/v3/services/aiRunFilterService.server.ts +++ b/apps/webapp/app/v3/services/aiRunFilterService.server.ts @@ -30,7 +30,7 @@ const AIFilterResponseSchema = z export interface QueryQueues { query( search: string | undefined, - type: "task" | "custom" | undefined, + type: "task" | "custom" | undefined ): Promise<{ queues: string[]; }>; @@ -39,14 +39,14 @@ export interface QueryQueues { export interface QueryVersions { query( versionPrefix: string | undefined, - isCurrent: boolean | undefined, + isCurrent: boolean | undefined ): Promise< | { - versions: string[]; - } + versions: string[]; + } | { - version: string; - } + version: string; + } >; } @@ -64,13 +64,13 @@ export interface QueryTasks { export type AIFilterResult = | { - success: true; - filters: TaskRunListSearchFilters; - } + success: true; + filters: TaskRunListSearchFilters; + } | { - success: false; - error: string; - }; + success: false; + error: string; 
+ }; export class AIRunFilterService { constructor( @@ -80,7 +80,7 @@ export class AIRunFilterService { queryQueues: QueryQueues; queryTasks: QueryTasks; }, - private readonly model: LanguageModelV1 = openai("gpt-4o-mini"), + private readonly model: LanguageModelV1 = openai("gpt-4o-mini") ) {} async call(text: string, environmentId: string): Promise { @@ -92,9 +92,7 @@ export class AIRunFilterService { lookupTags: tool({ description: "Look up available tags in the environment", parameters: z.object({ - query: z.string().optional().describe( - "Optional search query to filter tags", - ), + query: z.string().optional().describe("Optional search query to filter tags"), }), execute: async ({ query }) => { return await this.queryFns.queryTags.query(query); @@ -112,27 +110,22 @@ export class AIRunFilterService { .string() .optional() .describe( - "Optional version name to filter (e.g. 20250701.1), it uses contains to compare. Don't pass `latest` or `current`, the query has to be in the reverse date format specified. Leave out to get all recent versions.", + "Optional version name to filter (e.g. 20250701.1), it uses contains to compare. Don't pass `latest` or `current`, the query has to be in the reverse date format specified. Leave out to get all recent versions." ), }), execute: async ({ versionPrefix, isCurrent }) => { - return await this.queryFns.queryVersions.query( - versionPrefix, - isCurrent, - ); + return await this.queryFns.queryVersions.query(versionPrefix, isCurrent); }, }), lookupQueues: tool({ description: "Look up available queues in the environment", parameters: z.object({ - query: z.string().optional().describe( - "Optional search query to filter queues", - ), + query: z.string().optional().describe("Optional search query to filter queues"), type: z .enum(["task", "custom"]) .optional() .describe( - "Filter by queue type, only do this if the user specifies it explicitly.", + "Filter by queue type, only do this if the user specifies it explicitly." 
), }), execute: async ({ query, type }) => { @@ -149,15 +142,12 @@ export class AIRunFilterService { }), }, maxSteps: 5, - system: - `You are an AI assistant that converts natural language descriptions into structured filter parameters for a task run filtering system. + system: `You are an AI assistant that converts natural language descriptions into structured filter parameters for a task run filtering system. Available filter options: - statuses: Array of run statuses (PENDING, EXECUTING, COMPLETED_SUCCESSFULLY, COMPLETED_WITH_ERRORS, CANCELED, TIMED_OUT, CRASHED, etc.) - period: Time period string (e.g., "1h", "7d", "30d", "1y") - - from/to: ISO date string. Today's date is ${ - new Date().toISOString() - }, if they only specify a day use the current month. If they don't specify a year use the current year. If they don't specify a time of day use midnight. + - from/to: ISO date string. Today's date is ${new Date().toISOString()}, if they only specify a day use the current month. If they don't specify a year use the current year. If they don't specify a time of day use midnight. - tags: Array of tag names to filter by. Use the lookupTags tool to get the tags. - tasks: Array of task identifiers to filter by. Use the lookupTasks tool to get the tasks. - machines: Array of machine presets (micro, small, small-2x, medium, large, xlarge, etc.) @@ -169,7 +159,7 @@ export class AIRunFilterService { - scheduleId: Specific schedule ID to filter by - Common workflows to recognize: + Common patterns to recognize: - "failed runs" → statuses: ["COMPLETED_WITH_ERRORS", "CRASHED", "TIMED_OUT", "SYSTEM_FAILURE"]. - "runs not dequeued yet" → statuses: ["PENDING", "PENDING_VERSION", "DELAYED"] - If they say "only failed" then only use "COMPLETED_WITH_ERRORS". 
@@ -242,9 +232,7 @@ export class AIRunFilterService { } // Validate the filters against the schema to catch any issues - const validationResult = AIFilters.safeParse( - result.experimental_output.filters, - ); + const validationResult = AIFilters.safeParse(result.experimental_output.filters); if (!validationResult.success) { logger.error("AI filter validation failed", { errors: validationResult.error.errors, @@ -264,9 +252,7 @@ export class AIRunFilterService { from: validationResult.data.from ? new Date(validationResult.data.from).getTime() : undefined, - to: validationResult.data.to - ? new Date(validationResult.data.to).getTime() - : undefined, + to: validationResult.data.to ? new Date(validationResult.data.to).getTime() : undefined, }, }; } catch (error) {