From 4c86122574bc20be30233bd6b7d73d24ddd00eaa Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 00:11:41 -0700 Subject: [PATCH 01/15] fix(table): return 400 instead of 500 for malformed sort/filter input (#4425) * fix(table): return 400 instead of 500 for malformed sort/filter input * fix(table): revert default-case throw to plain Error and update JSDoc @throws tags --- .../sim/app/api/table/[tableId]/rows/route.ts | 14 +++++++- .../app/api/v1/tables/[tableId]/rows/route.ts | 14 +++++++- apps/sim/lib/table/sql.ts | 35 +++++++++++++------ 3 files changed, 51 insertions(+), 12 deletions(-) diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts index 8c69ef55a38..9b0076a127d 100644 --- a/apps/sim/app/api/table/[tableId]/rows/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -30,7 +30,7 @@ import { validateRowData, validateRowSize, } from '@/lib/table' -import { buildFilterClause, buildSortClause } from '@/lib/table/sql' +import { buildFilterClause, buildSortClause, TableQueryValidationError } from '@/lib/table/sql' import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableRowsAPI') @@ -336,6 +336,10 @@ export const GET = withRouteHandler( return validationErrorResponse(error) } + if (error instanceof TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + logger.error(`[${requestId}] Error querying rows:`, error) return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 }) } @@ -421,6 +425,10 @@ export const PUT = withRouteHandler( return validationErrorResponse(error) } + if (error instanceof TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + const errorMessage = toError(error).message if ( @@ -520,6 +528,10 @@ export const DELETE = withRouteHandler( return validationErrorResponse(error) } + if (error instanceof 
TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + const errorMessage = toError(error).message if (errorMessage.includes('Filter is required')) { diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts index a6bb5613cad..d4d9c448837 100644 --- a/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts @@ -30,7 +30,7 @@ import { validateRowData, validateRowSize, } from '@/lib/table' -import { buildFilterClause, buildSortClause } from '@/lib/table/sql' +import { buildFilterClause, buildSortClause, TableQueryValidationError } from '@/lib/table/sql' import { accessError, checkAccess } from '@/app/api/table/utils' import { checkRateLimit, @@ -240,6 +240,10 @@ export const GET = withRouteHandler(async (request: NextRequest, context: TableR const validationResponse = validationErrorResponseFromError(error) if (validationResponse) return validationResponse + if (error instanceof TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + logger.error(`[${requestId}] Error querying rows:`, error) return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 }) } @@ -407,6 +411,10 @@ export const PUT = withRouteHandler(async (request: NextRequest, context: TableR const validationResponse = validationErrorResponseFromError(error) if (validationResponse) return validationResponse + if (error instanceof TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + const errorMessage = toError(error).message if ( @@ -500,6 +508,10 @@ export const DELETE = withRouteHandler( const validationResponse = validationErrorResponseFromError(error) if (validationResponse) return validationResponse + if (error instanceof TableQueryValidationError) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + 
const errorMessage = toError(error).message if (errorMessage.includes('Filter is required')) { diff --git a/apps/sim/lib/table/sql.ts b/apps/sim/lib/table/sql.ts index d2004175f44..f854d2b5237 100644 --- a/apps/sim/lib/table/sql.ts +++ b/apps/sim/lib/table/sql.ts @@ -10,6 +10,17 @@ import { sql } from 'drizzle-orm' import { NAME_PATTERN } from './constants' import type { ColumnDefinition, ConditionOperators, Filter, JsonValue, Sort } from './types' +/** + * Error thrown when caller-supplied filter or sort input is malformed. + * Routes should map this to HTTP 400 with the message preserved. + */ +export class TableQueryValidationError extends Error { + constructor(message: string) { + super(message) + this.name = 'TableQueryValidationError' + } +} + /** * Whitelist of allowed operators for query filtering. * Only these operators can be used in filter conditions. @@ -41,7 +52,7 @@ const ALLOWED_OPERATORS = new Set([ * @param filter - Filter object with field conditions and logical operators * @param tableName - Table name for the query (e.g., 'user_table_rows') * @returns SQL WHERE clause or undefined if no filter specified - * @throws Error if field name is invalid or operator is not allowed + * @throws {TableQueryValidationError} if field name is invalid or operator is not allowed * * @example * // Simple equality @@ -110,7 +121,7 @@ export function buildFilterClause(filter: Filter, tableName: string): SQL | unde * @param tableName - Table name for the query (e.g., 'user_table_rows') * @param columns - Optional column definitions for type-aware sorting * @returns SQL ORDER BY clause or undefined if no sort specified - * @throws Error if field name is invalid + * @throws {TableQueryValidationError} if field name or sort direction is invalid * * @example * buildSortClause({ name: 'asc', age: 'desc' }, 'user_table_rows') @@ -133,7 +144,9 @@ export function buildSortClause( validateFieldName(field) if (direction !== 'asc' && direction !== 'desc') { - throw new 
Error(`Invalid sort direction "${direction}". Must be "asc" or "desc".`) + throw new TableQueryValidationError( + `Invalid sort direction "${direction}". Must be "asc" or "desc".` + ) } const columnType = columnTypeMap.get(field) @@ -148,15 +161,15 @@ export function buildSortClause( * Field names must match the NAME_PATTERN (alphanumeric + underscore, starting with letter/underscore). * * @param field - The field name to validate - * @throws Error if field name is invalid + * @throws {TableQueryValidationError} if field name is invalid */ function validateFieldName(field: string): void { if (!field || typeof field !== 'string') { - throw new Error('Field name must be a non-empty string') + throw new TableQueryValidationError('Field name must be a non-empty string') } if (!NAME_PATTERN.test(field)) { - throw new Error( + throw new TableQueryValidationError( `Invalid field name "${field}". Field names must start with a letter or underscore, followed by alphanumeric characters or underscores.` ) } @@ -166,11 +179,11 @@ function validateFieldName(field: string): void { * Validates an operator to ensure it's in the allowed list. * * @param operator - The operator to validate - * @throws Error if operator is not allowed + * @throws {TableQueryValidationError} if operator is not allowed */ function validateOperator(operator: string): void { if (!ALLOWED_OPERATORS.has(operator)) { - throw new Error( + throw new TableQueryValidationError( `Invalid operator "${operator}". Allowed operators: ${Array.from(ALLOWED_OPERATORS).join(', ')}` ) } @@ -190,7 +203,7 @@ function validateOperator(operator: string): void { * object with operators like $eq, $gt, $in, etc. * @returns Array of SQL condition fragments. Multiple conditions are returned * when the condition object contains multiple operators. 
- * @throws Error if field name is invalid or operator is not allowed + * @throws {TableQueryValidationError} if field name is invalid or operator is not allowed */ function buildFieldCondition( tableName: string, @@ -260,7 +273,9 @@ function buildFieldCondition( break default: - // This should never happen due to validateOperator, but added for completeness + // This should never happen due to validateOperator, but added for completeness. + // Throw a plain Error (→ 500) since reaching this default means the switch + // and ALLOWED_OPERATORS have drifted — that's a programmer error, not a caller error. throw new Error(`Unsupported operator: ${op}`) } } From af8dfbd57ef23865446525cb5b2c1853c43ed170 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 10:20:06 -0700 Subject: [PATCH 02/15] fix(knowledge): revert column width multipliers that misaligned Name header (#4427) --- .../workspace/[workspaceId]/knowledge/[id]/base.tsx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx index 2bf56b163f0..99364fa215b 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx @@ -84,11 +84,11 @@ const DOCUMENTS_PER_PAGE = 50 const DOCUMENT_COLUMNS: ResourceColumn[] = [ { id: 'name', header: 'Name' }, - { id: 'size', header: 'Size', widthMultiplier: 0.625 }, - { id: 'tokens', header: 'Tokens', widthMultiplier: 0.625 }, - { id: 'chunks', header: 'Chunks', widthMultiplier: 0.5 }, - { id: 'uploaded', header: 'Uploaded', widthMultiplier: 0.75 }, - { id: 'status', header: 'Status', widthMultiplier: 0.875 }, + { id: 'size', header: 'Size' }, + { id: 'tokens', header: 'Tokens' }, + { id: 'chunks', header: 'Chunks' }, + { id: 'uploaded', header: 'Uploaded' }, + { id: 'status', header: 'Status' }, { id: 'tags', header: 'Tags' }, ] From 
5d53847c2e6cec28878ea62d0af2dc8bf19d1a22 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 10:55:25 -0700 Subject: [PATCH 03/15] fix(executor): strip childTraceSpans from block state before LLM tool calls (#4428) * fix(executor): strip childTraceSpans from block state before LLM tool calls * fix(executor): return stripped output so orchestrator setBlockOutput stays clean --- apps/sim/executor/execution/block-executor.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/apps/sim/executor/execution/block-executor.ts b/apps/sim/executor/execution/block-executor.ts index 340b2aab01a..9a3c22e8529 100644 --- a/apps/sim/executor/execution/block-executor.ts +++ b/apps/sim/executor/execution/block-executor.ts @@ -187,7 +187,8 @@ export class BlockExecutor { } } - this.state.setBlockOutput(node.id, normalizedOutput, duration) + const { childTraceSpans: _traces, ...outputForState } = normalizedOutput + this.state.setBlockOutput(node.id, outputForState as NormalizedBlockOutput, duration) if (!isSentinel && blockLog) { const childWorkflowInstanceId = @@ -211,7 +212,7 @@ export class BlockExecutor { ) } - return normalizedOutput + return outputForState as NormalizedBlockOutput } catch (error) { return await this.handleBlockError( error, @@ -270,7 +271,6 @@ export class BlockExecutor { } if (ChildWorkflowError.isChildWorkflowError(error)) { - errorOutput.childTraceSpans = error.childTraceSpans errorOutput.childWorkflowName = error.childWorkflowName if (error.childWorkflowSnapshotId) { errorOutput.childWorkflowSnapshotId = error.childWorkflowSnapshotId @@ -287,8 +287,8 @@ export class BlockExecutor { blockLog.input = this.sanitizeInputsForLog(input) blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block }) - if (errorOutput.childTraceSpans && Array.isArray(errorOutput.childTraceSpans)) { - blockLog.childTraceSpans = errorOutput.childTraceSpans + if (ChildWorkflowError.isChildWorkflowError(error) && 
error.childTraceSpans.length > 0) { + blockLog.childTraceSpans = error.childTraceSpans } } From 57dc745bab0aab9f40e0debfdec5103b673f1b19 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 11:33:31 -0700 Subject: [PATCH 04/15] feat(knowledge): expose Cohere reranker controls (#4429) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(knowledge): expose Cohere reranker controls on knowledge block Add a self-hosted Cohere API key field (mirroring the agent block's hosted-key pattern), a configurable reranker input pool size (1-100), and surface meta.warnings from Cohere rerank responses via logger.warn. All new contract fields are optional and nullable for full backwards compatibility. Co-Authored-By: Claude Opus 4.7 * fix(knowledge): address PR feedback on Cohere reranker controls - Drop required:true on apiKey field — server has BYOK→env→rotation fallback chain, so self-hosted users with COHERE_API_KEY env should not be blocked - Drop .min(1) on rerankerApiKey contract field so empty strings coerce to undefined via the transform (matches the existing query field pattern) - Log a warning when rerankerInputCount is clamped up to topK so users notice their setting was overridden Co-Authored-By: Claude Opus 4.7 * feat(knowledge): mirror agent block API key visibility for Cohere reranker Restore required:true on the Cohere API Key field and hide it server-side via a new NEXT_PUBLIC_COHERE_CONFIGURED public env flag — same pattern the Agent block uses for Azure (NEXT_PUBLIC_AZURE_CONFIGURED). Self-hosters who set COHERE_API_KEY in their environment also set NEXT_PUBLIC_COHERE_CONFIGURED=true, which removes the field from the UI; everyone else sees a required field. 
Co-Authored-By: Claude Opus 4.7 * fix(knowledge): treat empty rerankerInputCount as unset An empty string from the Documents Sent to Reranker input passed the undefined/null guard, so Number('') = 0 → clamped to 1, sending only 1 document to the reranker instead of falling back to the 4× topK auto default. Add the empty-string check to the guard. Co-Authored-By: Claude Opus 4.7 --------- Co-authored-by: Claude Opus 4.7 --- apps/sim/.env.example | 2 ++ apps/sim/app/api/knowledge/search/route.ts | 25 ++++++++++--- apps/sim/blocks/blocks/knowledge.ts | 28 +++++++++++++++ apps/sim/blocks/utils.ts | 28 ++++++++++++++- .../sim/lib/api/contracts/knowledge/search.ts | 18 ++++++++++ apps/sim/lib/core/config/env.ts | 2 ++ apps/sim/lib/core/config/feature-flags.ts | 8 +++++ apps/sim/lib/knowledge/reranker.ts | 35 +++++++++++++++++-- apps/sim/tools/knowledge/search.ts | 31 +++++++++++++++- helm/sim/values.yaml | 6 ++++ 10 files changed, 175 insertions(+), 8 deletions(-) diff --git a/apps/sim/.env.example b/apps/sim/.env.example index 6f7aa473666..f554797ea1e 100644 --- a/apps/sim/.env.example +++ b/apps/sim/.env.example @@ -35,6 +35,8 @@ API_ENCRYPTION_KEY=your_api_encryption_key # Use `openssl rand -hex 32` to gener # AZURE_ANTHROPIC_API_KEY= # Azure Anthropic API key # AZURE_ANTHROPIC_API_VERSION= # Azure Anthropic API version (e.g., 2023-06-01) # NEXT_PUBLIC_AZURE_CONFIGURED=true # Set when Azure credentials are pre-configured above. Hides endpoint/key/version fields in Agent block UI. +# COHERE_API_KEY= # Cohere API key for the Knowledge block reranker (rerank-v4.0-pro/-fast, rerank-v3.5). Alternatively set COHERE_API_KEY_1/2/3 for rotation. +# NEXT_PUBLIC_COHERE_CONFIGURED=true # Set when COHERE_API_KEY (or rotation keys) are pre-configured above. Hides the Cohere API Key field on the Knowledge block UI. # Admin API (Optional - for self-hosted GitOps) # ADMIN_API_KEY= # Use `openssl rand -hex 32` to generate. Enables admin API for workflow export/import. 
diff --git a/apps/sim/app/api/knowledge/search/route.ts b/apps/sim/app/api/knowledge/search/route.ts index 13f4625a2cb..94c09f6c138 100644 --- a/apps/sim/app/api/knowledge/search/route.ts +++ b/apps/sim/app/api/knowledge/search/route.ts @@ -247,9 +247,21 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const hasFilters = structuredFilters && structuredFilters.length > 0 - /** Oversample candidates when reranking so the reranker has more to choose from. - * Cap at 100 to bound Cohere request cost (1 search unit = ≤100 docs). */ - const candidateTopK = useReranker ? Math.min(100, validatedData.topK * 4) : validatedData.topK + /** Oversample vector results when reranking so the reranker has more to choose from. + * Cap at 100 to bound Cohere request cost (1 search unit = ≤100 docs). When the caller + * supplies `rerankerInputCount`, honor it but never let it drop below `topK` + * (which would defeat the purpose) or exceed 100 (which would split into >1 search units). */ + const rawInputCount = validatedData.rerankerInputCount + if (useReranker && rawInputCount !== undefined && rawInputCount < validatedData.topK) { + logger.warn( + `[${requestId}] rerankerInputCount (${rawInputCount}) is below topK (${validatedData.topK}); raising to topK` + ) + } + const candidateTopK = useReranker + ? rawInputCount !== undefined + ? 
Math.min(100, Math.max(validatedData.topK, rawInputCount)) + : Math.min(100, validatedData.topK * 4) + : validatedData.topK if (!hasQuery && hasFilters) { results = await handleTagOnlySearch({ @@ -300,7 +312,12 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const { results: ranked, isBYOK } = await rerank( validatedData.query!, results.map((r) => ({ id: r.id, text: r.content })), - { model: rerankerModel, topN: validatedData.topK, workspaceId } + { + model: rerankerModel, + topN: validatedData.topK, + workspaceId, + apiKey: validatedData.rerankerApiKey, + } ) rerankBilled = true rerankIsBYOK = isBYOK diff --git a/apps/sim/blocks/blocks/knowledge.ts b/apps/sim/blocks/blocks/knowledge.ts index 3d17e9cb402..f8a92235b2e 100644 --- a/apps/sim/blocks/blocks/knowledge.ts +++ b/apps/sim/blocks/blocks/knowledge.ts @@ -1,6 +1,7 @@ import { PackageSearchIcon } from '@/components/icons' import { DEFAULT_RERANKER_MODEL, SUPPORTED_RERANKER_MODELS } from '@/lib/knowledge/reranker-models' import type { BlockConfig } from '@/blocks/types' +import { getCohereRerankerApiKeyCondition } from '@/blocks/utils' export const KnowledgeBlock: BlockConfig = { type: 'knowledge', @@ -105,6 +106,28 @@ export const KnowledgeBlock: BlockConfig = { and: { field: 'rerankerEnabled', value: true }, }, }, + { + id: 'rerankerInputCount', + title: 'Documents Sent to Reranker', + type: 'short-input', + placeholder: 'Auto (4× results, capped at 100)', + mode: 'advanced', + condition: { + field: 'operation', + value: 'search', + and: { field: 'rerankerEnabled', value: true }, + }, + }, + { + id: 'apiKey', + title: 'Cohere API Key', + type: 'short-input', + placeholder: 'Enter your Cohere API key', + password: true, + connectionDroppable: false, + required: true, + condition: getCohereRerankerApiKeyCondition(), + }, // --- List Documents --- { @@ -419,6 +442,11 @@ export const KnowledgeBlock: BlockConfig = { tagFilters: { type: 'string', description: 'Tag filter criteria' }, 
rerankerEnabled: { type: 'boolean', description: 'Apply Cohere reranking to search results' }, rerankerModel: { type: 'string', description: 'Cohere rerank model identifier' }, + rerankerInputCount: { + type: 'number', + description: 'Number of vector results sent to the Cohere reranker (1–100)', + }, + apiKey: { type: 'string', description: 'Cohere API key (self-hosted only)' }, documentTags: { type: 'string', description: 'Document tags' }, chunkSearch: { type: 'string', description: 'Search filter for chunks' }, chunkEnabledFilter: { type: 'string', description: 'Filter chunks by enabled status' }, diff --git a/apps/sim/blocks/utils.ts b/apps/sim/blocks/utils.ts index b70ca7af504..c22596b34cd 100644 --- a/apps/sim/blocks/utils.ts +++ b/apps/sim/blocks/utils.ts @@ -1,5 +1,10 @@ import { toError } from '@sim/utils/errors' -import { isAzureConfigured, isHosted, isOllamaConfigured } from '@/lib/core/config/feature-flags' +import { + isAzureConfigured, + isCohereConfigured, + isHosted, + isOllamaConfigured, +} from '@/lib/core/config/feature-flags' import { getScopesForService } from '@/lib/oauth/utils' import { buildCanonicalIndex } from '@/lib/workflows/subblocks/visibility' import type { BlockOutput, OutputFieldDefinition, SubBlockConfig } from '@/blocks/types' @@ -184,6 +189,27 @@ export function getApiKeyCondition() { } } +/** + * Visibility condition for the Cohere reranker API key field on the Knowledge block. + * Hidden on hosted Sim (platform supplies the key via workspace BYOK or rotating env keys) + * and on self-hosted deployments that have set `NEXT_PUBLIC_COHERE_CONFIGURED=true` to + * indicate `COHERE_API_KEY` is pre-configured server-side. Otherwise shown (and required) + * whenever reranking is enabled for a search operation, mirroring the agent block's + * `getApiKeyCondition` pattern. 
+ */ +export function getCohereRerankerApiKeyCondition() { + return () => { + if (isHosted || isCohereConfigured) { + return { field: 'operation', value: '__never_show__' } + } + return { + field: 'operation', + value: 'search', + and: { field: 'rerankerEnabled', value: true }, + } + } +} + /** * Returns the standard provider credential subblocks used by LLM-based blocks. * This includes: Vertex AI OAuth, API Key, Azure (OpenAI + Anthropic), Vertex AI config, and Bedrock config. diff --git a/apps/sim/lib/api/contracts/knowledge/search.ts b/apps/sim/lib/api/contracts/knowledge/search.ts index 291257e7b16..ea1dff75ce0 100644 --- a/apps/sim/lib/api/contracts/knowledge/search.ts +++ b/apps/sim/lib/api/contracts/knowledge/search.ts @@ -36,6 +36,24 @@ export const knowledgeSearchBodySchema = z .transform((val) => val || undefined), rerankerEnabled: z.boolean().optional().default(false), rerankerModel: rerankerModelSchema.optional().default(DEFAULT_RERANKER_MODEL), + /** + * Number of vector results sent to Cohere as the documents array for reranking. Capped at 100 + * so each rerank call stays within a single Cohere search unit (1 query × ≤100 docs); see + * `RERANK_MODEL_PRICING` in `providers/models.ts`. + */ + rerankerInputCount: z + .number() + .int('rerankerInputCount must be an integer') + .min(1, 'rerankerInputCount must be at least 1') + .max(100, 'rerankerInputCount cannot exceed 100') + .optional() + .nullable() + .transform((val) => val ?? 
undefined), + rerankerApiKey: z + .string() + .optional() + .nullable() + .transform((val) => val || undefined), }) .refine( (data) => { diff --git a/apps/sim/lib/core/config/env.ts b/apps/sim/lib/core/config/env.ts index 969324591b0..14bf33ce5d4 100644 --- a/apps/sim/lib/core/config/env.ts +++ b/apps/sim/lib/core/config/env.ts @@ -430,6 +430,7 @@ export const env = createEnv({ NEXT_PUBLIC_E2B_ENABLED: z.string().optional(), NEXT_PUBLIC_BEDROCK_DEFAULT_CREDENTIALS: z.string().optional(), // Hide Bedrock credential fields when deployment uses AWS default credential chain (IAM roles, instance profiles, ECS task roles, IRSA) NEXT_PUBLIC_AZURE_CONFIGURED: z.string().optional(), // Hide Azure credential fields when endpoint/key/version are pre-configured server-side + NEXT_PUBLIC_COHERE_CONFIGURED: z.string().optional(), // Hide Cohere API key field on Knowledge block when COHERE_API_KEY is pre-configured server-side NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: z.string().optional(), NEXT_PUBLIC_ENABLE_PLAYGROUND: z.string().optional(), // Enable component playground at /playground NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL @@ -496,6 +497,7 @@ export const env = createEnv({ NEXT_PUBLIC_E2B_ENABLED: process.env.NEXT_PUBLIC_E2B_ENABLED, NEXT_PUBLIC_BEDROCK_DEFAULT_CREDENTIALS: process.env.NEXT_PUBLIC_BEDROCK_DEFAULT_CREDENTIALS, NEXT_PUBLIC_AZURE_CONFIGURED: process.env.NEXT_PUBLIC_AZURE_CONFIGURED, + NEXT_PUBLIC_COHERE_CONFIGURED: process.env.NEXT_PUBLIC_COHERE_CONFIGURED, NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: process.env.NEXT_PUBLIC_COPILOT_TRAINING_ENABLED, NEXT_PUBLIC_ENABLE_PLAYGROUND: process.env.NEXT_PUBLIC_ENABLE_PLAYGROUND, NEXT_PUBLIC_POSTHOG_ENABLED: process.env.NEXT_PUBLIC_POSTHOG_ENABLED, diff --git a/apps/sim/lib/core/config/feature-flags.ts b/apps/sim/lib/core/config/feature-flags.ts index c593c2b3eda..3a69af74fd1 100644 --- a/apps/sim/lib/core/config/feature-flags.ts +++ b/apps/sim/lib/core/config/feature-flags.ts @@ 
-156,6 +156,14 @@ export const isOllamaConfigured = Boolean(env.OLLAMA_URL) */ export const isAzureConfigured = isTruthy(getEnv('NEXT_PUBLIC_AZURE_CONFIGURED')) +/** + * Whether a Cohere API key is pre-configured server-side for the Knowledge block reranker + * (`COHERE_API_KEY` or `COHERE_API_KEY_1/2/3`). When true, the Cohere API Key field is hidden + * in the Knowledge block UI. + * Set NEXT_PUBLIC_COHERE_CONFIGURED=true in self-hosted deployments that ship a Cohere key. + */ +export const isCohereConfigured = isTruthy(getEnv('NEXT_PUBLIC_COHERE_CONFIGURED')) + /** * Are invitations disabled globally * When true, workspace invitations are disabled for all users diff --git a/apps/sim/lib/knowledge/reranker.ts b/apps/sim/lib/knowledge/reranker.ts index 54b2ae02c91..b1bebc11aa8 100644 --- a/apps/sim/lib/knowledge/reranker.ts +++ b/apps/sim/lib/knowledge/reranker.ts @@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger' import { getBYOKKey } from '@/lib/api-key/byok' import { getRotatingApiKey } from '@/lib/core/config/api-keys' import { env } from '@/lib/core/config/env' +import { isHosted } from '@/lib/core/config/feature-flags' import { isRetryableError, retryWithExponentialBackoff } from '@/lib/knowledge/documents/utils' import { DEFAULT_RERANKER_MODEL, @@ -56,8 +57,18 @@ class RerankAPIError extends Error { } async function resolveCohereKey( - workspaceId?: string | null + workspaceId?: string | null, + userApiKey?: string ): Promise<{ apiKey: string; isBYOK: boolean }> { + /** + * Mirrors the agent block hosted-key pattern (`injectHostedKeyIfNeeded`): + * on self-hosted the user-supplied key from the block field flows through + * unchanged; on hosted Sim we always source the key from workspace BYOK or + * platform env, so any user-supplied value is ignored. 
+ */ + if (!isHosted && userApiKey) { + return { apiKey: userApiKey, isBYOK: false } + } if (workspaceId) { const byokResult = await getBYOKKey(workspaceId, 'cohere') if (byokResult) { @@ -77,8 +88,19 @@ async function resolveCohereKey( } } +/** + * Subset of Cohere v2/rerank response fields we read. + * Reference: https://docs.cohere.com/v2/reference/rerank + * - `results[].index` maps back to the position in the documents we sent. + * - `results[].relevance_score` is normalized 0–1. + * - `meta.warnings` is documented as an array of strings; we surface them in logs + * so issues like document truncation don't disappear silently. + */ interface CohereRerankResponse { results: Array<{ index: number; relevance_score: number }> + meta?: { + warnings?: string[] + } } /** @@ -92,6 +114,8 @@ export async function rerank( model: string topN?: number workspaceId?: string | null + /** User-supplied Cohere key from the Knowledge block field. Honored only on self-hosted. */ + apiKey?: string } ): Promise> { if (items.length === 0) return { results: [], isBYOK: false } @@ -100,7 +124,7 @@ export async function rerank( throw new Error(`Unsupported reranker model: ${options.model}`) } - const { apiKey, isBYOK } = await resolveCohereKey(options.workspaceId) + const { apiKey, isBYOK } = await resolveCohereKey(options.workspaceId, options.apiKey) const cappedItems = items.length > MAX_DOCUMENTS_PER_RERANK ? 
items.slice(0, MAX_DOCUMENTS_PER_RERANK) : items if (items.length > MAX_DOCUMENTS_PER_RERANK) { @@ -151,6 +175,13 @@ export async function rerank( } ) + if (response.meta?.warnings && response.meta.warnings.length > 0) { + logger.warn('Cohere rerank returned warnings', { + model: options.model, + warnings: response.meta.warnings, + }) + } + return { results: response.results .filter((r) => r.index >= 0 && r.index < cappedItems.length) diff --git a/apps/sim/tools/knowledge/search.ts b/apps/sim/tools/knowledge/search.ts index 7f0ee99e933..09da5193704 100644 --- a/apps/sim/tools/knowledge/search.ts +++ b/apps/sim/tools/knowledge/search.ts @@ -55,6 +55,19 @@ export const knowledgeSearchTool: ToolConfig = { description: 'Cohere rerank model to use (one of: rerank-v4.0-pro, rerank-v4.0-fast, rerank-v3.5)', }, + rerankerInputCount: { + type: 'number', + required: false, + visibility: 'user-only', + description: + 'Number of vector results sent to the Cohere reranker (1–100). Defaults to topK × 4 capped at 100.', + }, + apiKey: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Cohere API key for reranker (self-hosted deployments only)', + }, }, schemaEnrichment: { @@ -84,13 +97,29 @@ export const knowledgeSearchTool: ToolConfig = { typeof params.rerankerModel === 'string' && params.rerankerModel.length > 0 ? params.rerankerModel : DEFAULT_RERANKER_MODEL + const rerankerApiKey = + typeof params.apiKey === 'string' && params.apiKey.length > 0 ? params.apiKey : undefined + const rawInputCount = + params.rerankerInputCount !== undefined && + params.rerankerInputCount !== null && + params.rerankerInputCount !== '' + ? Number(params.rerankerInputCount) + : Number.NaN + const rerankerInputCount = Number.isFinite(rawInputCount) + ? Math.max(1, Math.min(100, Math.floor(rawInputCount))) + : undefined const requestBody = { knowledgeBaseIds, query: params.query, topK: params.topK ? 
Math.max(1, Math.min(100, Number(params.topK))) : 10, ...(structuredFilters.length > 0 && { tagFilters: structuredFilters }), - ...(rerankerEnabled && { rerankerEnabled: true, rerankerModel }), + ...(rerankerEnabled && { + rerankerEnabled: true, + rerankerModel, + ...(rerankerInputCount !== undefined && { rerankerInputCount }), + ...(rerankerApiKey && { rerankerApiKey }), + }), ...(workflowId && { workflowId }), } diff --git a/helm/sim/values.yaml b/helm/sim/values.yaml index 97fbeba5761..d2fd5c0ee11 100644 --- a/helm/sim/values.yaml +++ b/helm/sim/values.yaml @@ -275,6 +275,12 @@ app: # in the Agent block UI — users just pick an Azure model and run. NEXT_PUBLIC_AZURE_CONFIGURED: "" # Set to "true" to hide Azure credential fields + # Cohere Reranker (Knowledge block) + # Set COHERE_API_KEY (or COHERE_API_KEY_1/2/3 for rotation) and NEXT_PUBLIC_COHERE_CONFIGURED=true + # to pre-configure the Cohere reranker server-side. When configured, the Cohere API Key field is + # hidden in the Knowledge block UI. + NEXT_PUBLIC_COHERE_CONFIGURED: "" # Set to "true" to hide the Cohere API Key field on the Knowledge block + # AWS S3 Cloud Storage Configuration (optional - for file storage) # If configured, files will be stored in S3 instead of local storage AWS_REGION: "" # AWS region (e.g., "us-east-1") From 3af6c25b970b1518ca0c6fd882265c1a63004244 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 12:13:30 -0700 Subject: [PATCH 05/15] fix(mothership): catch draft restore errors instead of crashing /home (#4433) * fix(mothership): catch draft restore errors instead of crashing /home Wrap the mount-time draft restore in try/catch with clearDraft on throw, and coerce text to a string in the useState initializer. A corrupt entry in mothership-drafts:v1 localStorage previously took down the entire workspace via the error boundary. 
* fix(mothership): defer state writes and log restore failures Build the restored state in locals first and only apply on success so a partial throw can't leave stale contexts in the UI with the draft already cleared. Switch the empty catch to logger.error so corrupt-draft incidents surface in production logs. --- .../home/components/user-input/user-input.tsx | 40 +++++++++++++------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx index 6a83a037c1b..a60c8ebff23 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx @@ -11,6 +11,7 @@ import { useRef, useState, } from 'react' +import { createLogger } from '@sim/logger' import { Paperclip } from 'lucide-react' import { useParams } from 'next/navigation' import { Button, Tooltip } from '@/components/emcn' @@ -58,6 +59,8 @@ import type { ChatContext } from '@/stores/panel' export type { FileAttachmentForApi } from '@/app/workspace/[workspaceId]/home/types' +const logger = createLogger('UserInput') + function getCaretAnchor( textarea: HTMLTextAreaElement, caretPos: number @@ -148,7 +151,8 @@ export const UserInput = forwardRef(function Us const [value, setValue] = useState(() => { if (defaultValue) return defaultValue if (!draftScopeKey) return '' - return useMothershipDraftsStore.getState().drafts[draftScopeKey]?.text ?? '' + const text = useMothershipDraftsStore.getState().drafts[draftScopeKey]?.text + return typeof text === 'string' ? 
text : '' }) const overlayRef = useRef(null) const plusMenuRef = useRef(null) @@ -189,14 +193,17 @@ export const UserInput = forwardRef(function Us useEffect(() => { if (hasRestoredDraftRef.current || !draftScopeKey) return hasRestoredDraftRef.current = true - const draft = useMothershipDraftsStore.getState().drafts[draftScopeKey] - if (!draft) return - if (draft.contexts?.length) { - contextManagement.setSelectedContexts(draft.contexts) - } - if (draft.fileAttachments?.length) { - files.restoreAttachedFiles( - draft.fileAttachments.map((a) => ({ + let restoredContexts: ChatContext[] | null = null + let restoredFiles: AttachedFile[] | null = null + let caretText: string | null = null + try { + const draft = useMothershipDraftsStore.getState().drafts[draftScopeKey] + if (!draft) return + if (draft.contexts?.length) { + restoredContexts = draft.contexts + } + if (draft.fileAttachments?.length) { + restoredFiles = draft.fileAttachments.map((a) => ({ id: a.id, name: a.filename, size: a.size, @@ -205,13 +212,22 @@ export const UserInput = forwardRef(function Us key: a.key, uploading: false, })) - ) + } + if (typeof draft.text === 'string' && draft.text.length > 0) { + caretText = draft.text + } + } catch (err) { + logger.error('Failed to read draft, clearing', { err }) + useMothershipDraftsStore.getState().clearDraft(draftScopeKey) + return } - if (draft.text) { + if (restoredContexts) contextManagement.setSelectedContexts(restoredContexts) + if (restoredFiles) files.restoreAttachedFiles(restoredFiles) + if (caretText !== null) { const textarea = textareaRef.current if (textarea) { textarea.focus() - textarea.setSelectionRange(draft.text.length, draft.text.length) + textarea.setSelectionRange(caretText.length, caretText.length) } } }, []) // eslint-disable-line react-hooks/exhaustive-deps -- intentional mount-only restore From 578fc505ece13e704420b987734e0e6994270f7a Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 12:44:16 -0700 Subject: [PATCH 06/15] 
fix(mothership): stop persisting log resources from get_workflow_logs and self-heal stale log panel entries (#4424) * fix(mothership): stop persisting log resources from get_workflow_logs and self-heal stale log panel entries * fix(mothership): skip retries on 404 in useLogDetail for instant self-heal * fix(mothership): simplify onNotFoundRef sync to inline assignment --- .../resource-content/resource-content.tsx | 27 ++++++++++++++++--- .../mothership-view/mothership-view.tsx | 1 + .../[workspaceId]/home/hooks/use-chat.ts | 12 +++++++++ apps/sim/hooks/queries/logs.ts | 3 +++ apps/sim/lib/copilot/resources/extraction.ts | 15 ----------- 5 files changed, 39 insertions(+), 19 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx index 46f78e1f89e..33d4ade4e56 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx @@ -1,6 +1,6 @@ 'use client' -import { lazy, memo, Suspense, useEffect, useMemo } from 'react' +import { lazy, memo, Suspense, useEffect, useMemo, useRef } from 'react' import { createLogger } from '@sim/logger' import { Square } from 'lucide-react' import { useRouter } from 'next/navigation' @@ -13,6 +13,7 @@ import { SquareArrowUpRight, WorkflowX, } from '@/components/emcn/icons' +import { isApiClientError } from '@/lib/api/client/errors' import type { FilePreviewSession } from '@/lib/copilot/request/session' import { cancelRunToolExecution, @@ -70,6 +71,7 @@ interface ResourceContentProps { previewSession?: FilePreviewSession | null genericResourceData?: GenericResourceData previewContextKey?: string + onNotFound?: (resourceId: string) => void } /** @@ 
-86,6 +88,7 @@ export const ResourceContent = memo(function ResourceContent({ previewSession, genericResourceData, previewContextKey, + onNotFound, }: ResourceContentProps) { const streamFileName = previewSession?.fileName || 'file.md' const syntheticFile = useMemo(() => { @@ -179,7 +182,13 @@ export const ResourceContent = memo(function ResourceContent({ return case 'log': - return + return ( + onNotFound(resource.id) : undefined} + /> + ) case 'generic': return ( @@ -618,10 +627,20 @@ function EmbeddedFolder({ workspaceId, folderId }: EmbeddedFolderProps) { interface EmbeddedLogProps { logId: string + onNotFound?: () => void } -function EmbeddedLog({ logId }: EmbeddedLogProps) { - const { data: log, isLoading } = useLogDetail(logId) +function EmbeddedLog({ logId, onNotFound }: EmbeddedLogProps) { + const { data: log, isLoading, error } = useLogDetail(logId) + + const onNotFoundRef = useRef(onNotFound) + onNotFoundRef.current = onNotFound + + useEffect(() => { + if (isApiClientError(error) && error.status === 404) { + onNotFoundRef.current?.() + } + }, [error]) if (isLoading) return LOADING_SKELETON diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/mothership-view.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/mothership-view.tsx index fcfb08ff948..4eb7227c850 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/mothership-view.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/mothership-view.tsx @@ -128,6 +128,7 @@ export const MothershipView = memo( previewSession={previewForActive} genericResourceData={active.type === 'generic' ? genericResourceData : undefined} previewContextKey={chatId} + onNotFound={(resourceId) => onRemoveResource('log', resourceId)} /> ) : (
diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts index b4dccd4d4df..0799193d148 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts +++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts @@ -1420,6 +1420,18 @@ export function useChat( const removeResource = useCallback((resourceType: MothershipResourceType, resourceId: string) => { setResources((prev) => prev.filter((r) => !(r.type === resourceType && r.id === resourceId))) setActiveResourceId((prev) => (prev === resourceId ? null : prev)) + + const persistChatId = chatIdRef.current ?? selectedChatIdRef.current + if (persistChatId) { + // boundary-raw-fetch: fire-and-forget side-effect; intentionally avoids requestJson's response parsing/throw semantics so a transient failure cannot interrupt the caller + fetch('/api/mothership/chat/resources', { + method: 'DELETE', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ chatId: persistChatId, resourceType, resourceId }), + }).catch((err) => { + logger.warn('Failed to persist resource removal', err) + }) + } }, []) const reorderResources = useCallback((newOrder: MothershipResource[]) => { diff --git a/apps/sim/hooks/queries/logs.ts b/apps/sim/hooks/queries/logs.ts index bd5b0e5e695..d2f4bbfa4e9 100644 --- a/apps/sim/hooks/queries/logs.ts +++ b/apps/sim/hooks/queries/logs.ts @@ -7,6 +7,7 @@ import { useQuery, useQueryClient, } from '@tanstack/react-query' +import { isApiClientError } from '@/lib/api/client/errors' import { requestJson } from '@/lib/api/client/request' import { cancelWorkflowExecutionContract, @@ -194,6 +195,8 @@ export function useLogDetail(logId: string | undefined, options?: UseLogDetailOp enabled: Boolean(logId) && (options?.enabled ?? true), refetchInterval: options?.refetchInterval ?? 
false, staleTime: 30 * 1000, + retry: (failureCount, err) => + !(isApiClientError(err) && err.status === 404) && failureCount < 3, }) } diff --git a/apps/sim/lib/copilot/resources/extraction.ts b/apps/sim/lib/copilot/resources/extraction.ts index 29ca644a21a..6cba5a3bee0 100644 --- a/apps/sim/lib/copilot/resources/extraction.ts +++ b/apps/sim/lib/copilot/resources/extraction.ts @@ -7,7 +7,6 @@ import { FunctionExecute, GenerateImage, GenerateVisualization, - GetWorkflowLogs, Knowledge, KnowledgeBase, UserTable, @@ -30,7 +29,6 @@ const RESOURCE_TOOL_NAMES: Set = new Set([ Knowledge.id, GenerateVisualization.id, GenerateImage.id, - GetWorkflowLogs.id, ]) export function isResourceToolName(toolName: string): boolean { @@ -214,19 +212,6 @@ export function extractResourcesFromToolResult( return resources } - case GetWorkflowLogs.id: { - const entries = Array.isArray(output) ? output : Array.isArray(result.data) ? result.data : [] - const resources: ChatResource[] = [] - for (const entry of entries) { - const rec = asRecord(entry) - const logId = rec.id as string | undefined - if (logId) { - resources.push({ type: 'log', id: logId, title: 'Log' }) - } - } - return resources - } - default: return [] } From 2f90e41274eaaf9d34b0ed709a20f1e9af4c586d Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 12:46:04 -0700 Subject: [PATCH 07/15] feat(mothership): restore attachment previews on draft and add video support (#4435) * feat(mothership): restore attachment previews on draft and add video support * fix(mothership): icon fallback behind video preview --- .../components/chat-message-attachments.tsx | 31 ++++++++++++++++--- .../components/attached-files-list.tsx | 21 +++++++++++-- .../home/components/user-input/user-input.tsx | 3 ++ .../[workspaceId]/home/hooks/use-chat.ts | 5 ++- .../user-input/hooks/use-file-attachments.ts | 5 ++- .../lib/copilot/chat/attachment-preview.ts | 9 ++++++ apps/sim/lib/copilot/chat/display-message.ts | 5 ++- 7 files changed, 66 
insertions(+), 13 deletions(-) create mode 100644 apps/sim/lib/copilot/chat/attachment-preview.ts diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/chat-message-attachments.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/chat-message-attachments.tsx index e39d3a0dd37..d430fb14e61 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/chat-message-attachments.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/chat-message-attachments.tsx @@ -30,13 +30,36 @@ export function ChatMessageAttachments(props: { )} > {attachments.map((att) => { - const isImage = att.media_type.startsWith('image/') - return isImage && att.previewUrl ? ( + if (!att.previewUrl) { + return ( + + ) + } + const isVideo = att.media_type.startsWith('video/') + if (isVideo) { + const Icon = getDocumentIcon(att.media_type, att.filename) + return ( +
+
+ +
+
+ ) + } + return (
{att.filename}
- ) : ( - ) })}
diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/attached-files-list.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/attached-files-list.tsx index 6046107e6d6..3dc97d208ac 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/attached-files-list.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/attached-files-list.tsx @@ -22,7 +22,8 @@ export const AttachedFilesList = React.memo(function AttachedFilesList({ return (
{attachedFiles.map((file) => { - const isImage = file.type.startsWith('image/') + const isVideo = file.type.startsWith('video/') + const hasPreview = Boolean(file.previewUrl) return ( @@ -30,7 +31,23 @@ export const AttachedFilesList = React.memo(function AttachedFilesList({ className='group relative h-[56px] w-[56px] flex-shrink-0 cursor-pointer overflow-hidden rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-5)] hover:bg-[var(--surface-4)]' onClick={() => onFileClick(file)} > - {isImage && file.previewUrl ? ( + {hasPreview && isVideo ? ( + <> +
+ {(() => { + const Icon = getDocumentIcon(file.type, file.name) + return + })()} +
+
- { - activeTabRef.current = tab - onActiveTabChange?.(tab) - }} - /> + )} diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx index 0ce7aafa9f2..d8435907db8 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx @@ -15,13 +15,13 @@ import { SquareArrowUpRight, X, } from '@/components/emcn' -import type { WorkflowLog } from '@/stores/logs/filters/types' +import type { WorkflowLogSummary } from '@/lib/api/contracts/logs' interface LogRowContextMenuProps { isOpen: boolean position: { x: number; y: number } onClose: () => void - log: WorkflowLog | null + log: WorkflowLogSummary | null onCopyExecutionId: () => void onCopyLink: () => void onOpenWorkflow: () => void diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-list/logs-list.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-list/logs-list.tsx index e8dd1d912be..7bf398a99ca 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-list/logs-list.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-list/logs-list.tsx @@ -6,6 +6,7 @@ import { ArrowUpRight } from 'lucide-react' import Link from 'next/link' import { List, type RowComponentProps, useListRef } from 'react-window' import { Badge, buttonVariants, Loader } from '@/components/emcn' +import type { WorkflowLogSummary } from '@/lib/api/contracts/logs' import { dollarsToCredits } from '@/lib/billing/credits/conversion' import { cn } from '@/lib/core/utils/cn' import { workflowBorderColor } from '@/lib/workspaces/colors' @@ -18,16 +19,15 @@ import { StatusBadge, TriggerBadge, } from '@/app/workspace/[workspaceId]/logs/utils' -import type { WorkflowLog } from 
'@/stores/logs/filters/types' const LOG_ROW_HEIGHT = 44 as const interface LogRowProps { - log: WorkflowLog + log: WorkflowLogSummary isSelected: boolean - onClick: (log: WorkflowLog) => void - onHover?: (log: WorkflowLog) => void - onContextMenu?: (e: React.MouseEvent, log: WorkflowLog) => void + onClick: (log: WorkflowLogSummary) => void + onHover?: (log: WorkflowLogSummary) => void + onContextMenu?: (e: React.MouseEvent, log: WorkflowLogSummary) => void selectedRowRef: React.RefObject | null } @@ -56,7 +56,7 @@ const LogRow = memo( ? '#ec4899' : isDeletedWorkflow ? DELETED_WORKFLOW_COLOR - : log.workflow?.color + : (log.workflow?.color ?? undefined) const handleClick = () => onClick(log) const handleMouseEnter = () => onHover?.(log) @@ -164,11 +164,11 @@ const LogRow = memo( ) interface RowProps { - logs: WorkflowLog[] + logs: WorkflowLogSummary[] selectedLogId: string | null - onLogClick: (log: WorkflowLog) => void - onLogHover?: (log: WorkflowLog) => void - onLogContextMenu?: (e: React.MouseEvent, log: WorkflowLog) => void + onLogClick: (log: WorkflowLogSummary) => void + onLogHover?: (log: WorkflowLogSummary) => void + onLogContextMenu?: (e: React.MouseEvent, log: WorkflowLogSummary) => void selectedRowRef: React.RefObject isFetchingNextPage: boolean loaderRef: React.RefObject @@ -225,11 +225,11 @@ function Row({ } export interface LogsListProps { - logs: WorkflowLog[] + logs: WorkflowLogSummary[] selectedLogId: string | null - onLogClick: (log: WorkflowLog) => void - onLogHover?: (log: WorkflowLog) => void - onLogContextMenu?: (e: React.MouseEvent, log: WorkflowLog) => void + onLogClick: (log: WorkflowLogSummary) => void + onLogHover?: (log: WorkflowLogSummary) => void + onLogContextMenu?: (e: React.MouseEvent, log: WorkflowLogSummary) => void selectedRowRef: React.RefObject hasNextPage: boolean isFetchingNextPage: boolean diff --git a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx index 
de2dce93250..5c3b3b0af66 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx @@ -16,6 +16,11 @@ import { RefreshCw, toast, } from '@/components/emcn' +import type { + WorkflowLogDetail, + WorkflowLogRow, + WorkflowLogSummary, +} from '@/lib/api/contracts/logs' import { dollarsToCredits } from '@/lib/billing/credits/conversion' import { cn } from '@/lib/core/utils/cn' import { @@ -50,12 +55,14 @@ import type { Suggestion } from '@/app/workspace/[workspaceId]/logs/types' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { getBlock } from '@/blocks/registry' import { useFolderMap, useFolders } from '@/hooks/queries/folders' +import type { LogSortBy, LogSortOrder } from '@/hooks/queries/logs' import { fetchLogDetail, logKeys, prefetchLogDetail, useCancelExecution, useDashboardStats, + useLogByExecutionId, useLogDetail, useLogsList, useRetryExecution, @@ -63,7 +70,6 @@ import { import { useWorkflowMap, useWorkflows } from '@/hooks/queries/workflows' import { useDebounce } from '@/hooks/use-debounce' import { useFilterStore } from '@/stores/logs/filters/store' -import type { WorkflowLog } from '@/stores/logs/filters/types' import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' import { Dashboard, @@ -86,6 +92,7 @@ import { } from './utils' const LOGS_PER_PAGE = 50 as const +const SORTABLE_COLUMNS: readonly LogSortBy[] = ['date', 'duration', 'cost', 'status'] as const const REFRESH_SPINNER_DURATION_MS = 1000 as const const LOG_COLUMNS: ResourceColumn[] = [ @@ -214,6 +221,11 @@ export default function Logs() { const params = useParams() const workspaceId = params.workspaceId as string + useState(() => { + useFilterStore.getState().initializeFromURL() + return null + }) + const { setWorkspaceId, initializeFromURL, @@ -268,14 +280,11 @@ export default function Logs() { selectedLogId: null, isSidebarOpen: false, }) - const 
isInitialized = useRef(false) - const pendingExecutionIdRef = useRef(undefined) - if (pendingExecutionIdRef.current === undefined) { - pendingExecutionIdRef.current = - typeof window !== 'undefined' - ? new URLSearchParams(window.location.search).get('executionId') - : null - } + const [pendingExecutionId, setPendingExecutionId] = useState(() => + typeof window !== 'undefined' + ? new URLSearchParams(window.location.search).get('executionId') + : null + ) const [searchQuery, setSearchQuery] = useState(() => { if (typeof window === 'undefined') return '' @@ -287,7 +296,7 @@ export default function Logs() { const [isVisuallyRefreshing, setIsVisuallyRefreshing] = useState(false) const [isExporting, setIsExporting] = useState(false) const refreshTimersRef = useRef(new Set()) - const logsRef = useRef([]) + const logsRef = useRef([]) const selectedLogIndexRef = useRef(-1) const selectedLogIdRef = useRef(null) const shouldScrollIntoViewRef = useRef(false) @@ -304,21 +313,35 @@ export default function Logs() { const [contextMenuOpen, setContextMenuOpen] = useState(false) const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 }) - const [contextMenuLog, setContextMenuLog] = useState(null) + const [contextMenuLog, setContextMenuLog] = useState(null) const [previewLogId, setPreviewLogId] = useState(null) - const activeLogId = previewLogId ?? selectedLogId const queryClient = useQueryClient() - const activeLogQuery = useLogDetail(activeLogId ?? undefined, { - refetchInterval: (query: { state: { data?: WorkflowLog } }) => { + const refetchInterval = useCallback( + (query: { state: { data?: WorkflowLogDetail } }) => { if (!isLive) return false const status = query.state.data?.status return status === 'running' || status === 'pending' ? 3000 : false }, + [isLive] + ) + + const selectedDetailQuery = useLogDetail(selectedLogId ?? undefined, workspaceId, { + refetchInterval, }) + const previewDetailQuery = useLogDetail(previewLogId ?? 
undefined, workspaceId, { + refetchInterval, + }) + + const sortBy: LogSortBy = + activeSort && SORTABLE_COLUMNS.includes(activeSort.column as LogSortBy) + ? (activeSort.column as LogSortBy) + : 'date' + const sortOrder: LogSortOrder = activeSort?.direction ?? 'desc' + const logFilters = useMemo( () => ({ timeRange, @@ -330,12 +353,24 @@ export default function Logs() { triggers, searchQuery: debouncedSearchQuery, limit: LOGS_PER_PAGE, + sortBy, + sortOrder, }), - [timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery] + [ + timeRange, + startDate, + endDate, + level, + workflowIds, + folderIds, + triggers, + debouncedSearchQuery, + sortBy, + sortOrder, + ] ) const logsQuery = useLogsList(workspaceId, logFilters, { - enabled: Boolean(workspaceId) && isInitialized.current, refetchInterval: isLive ? 3000 : false, }) @@ -354,7 +389,6 @@ export default function Logs() { ) const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, { - enabled: Boolean(workspaceId) && isInitialized.current, refetchInterval: isLive ? 3000 : false, }) @@ -362,80 +396,42 @@ export default function Logs() { return logsQuery.data?.pages?.flatMap((page) => page.logs) ?? [] }, [logsQuery.data?.pages]) - const sortedLogs = useMemo(() => { - if (!activeSort) return logs - - const { column, direction } = activeSort - return [...logs].sort((a, b) => { - let cmp = 0 - switch (column) { - case 'date': - cmp = new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime() - break - case 'duration': { - const aDuration = parseDuration({ duration: a.duration ?? undefined }) ?? -1 - const bDuration = parseDuration({ duration: b.duration ?? undefined }) ?? -1 - cmp = aDuration - bDuration - break - } - case 'cost': { - const aCost = typeof a.cost?.total === 'number' ? a.cost.total : -1 - const bCost = typeof b.cost?.total === 'number' ? b.cost.total : -1 - cmp = aCost - bCost - break - } - case 'status': - cmp = (a.status ?? 
'').localeCompare(b.status ?? '') - break - default: - break - } - return direction === 'asc' ? cmp : -cmp - }) - }, [logs, activeSort]) - - const selectedLogIndex = selectedLogId ? sortedLogs.findIndex((l) => l.id === selectedLogId) : -1 - const selectedLogFromList = selectedLogIndex >= 0 ? sortedLogs[selectedLogIndex] : null - - const selectedLog = useMemo(() => { - if (!selectedLogFromList) return null - if (!activeLogQuery.data || previewLogId !== null) return selectedLogFromList - return { ...selectedLogFromList, ...activeLogQuery.data } - }, [selectedLogFromList, activeLogQuery.data, previewLogId]) + const selectedLogIndex = selectedLogId ? logs.findIndex((l) => l.id === selectedLogId) : -1 + const selectedLogFromList = selectedLogIndex >= 0 ? logs[selectedLogIndex] : null + const selectedLog = selectedDetailQuery.data ?? selectedLogFromList ?? null const handleLogHover = useCallback( (rowId: string) => { - prefetchLogDetail(queryClient, rowId) + prefetchLogDetail(queryClient, rowId, workspaceId) }, - [queryClient] + [queryClient, workspaceId] ) useFolders(workspaceId) - logsRef.current = sortedLogs + logsRef.current = logs selectedLogIndexRef.current = selectedLogIndex selectedLogIdRef.current = selectedLogId logsRefetchRef.current = logsQuery.refetch - activeLogRefetchRef.current = activeLogQuery.refetch + activeLogRefetchRef.current = selectedDetailQuery.refetch logsQueryRef.current = { isFetching: logsQuery.isFetching, hasNextPage: logsQuery.hasNextPage ?? 
false, fetchNextPage: logsQuery.fetchNextPage, } + const deepLinkQuery = useLogByExecutionId(workspaceId, pendingExecutionId) + useEffect(() => { - if (!pendingExecutionIdRef.current) return - const targetExecutionId = pendingExecutionIdRef.current - const found = sortedLogs.find((l) => l.executionId === targetExecutionId) - if (found) { - pendingExecutionIdRef.current = null - dispatch({ type: 'TOGGLE_LOG', logId: found.id }) - } else if (!logsQuery.hasNextPage && logsQuery.status === 'success') { - pendingExecutionIdRef.current = null - } else if (!logsQuery.isFetching && logsQuery.status === 'success') { - logsQueryRef.current.fetchNextPage() + if (!pendingExecutionId) return + const resolvedId = deepLinkQuery.data?.id + if (resolvedId) { + dispatch({ type: 'TOGGLE_LOG', logId: resolvedId }) + setPendingExecutionId(null) + } else if (deepLinkQuery.isError) { + setPendingExecutionId(null) } - }, [sortedLogs, logsQuery.hasNextPage, logsQuery.isFetching, logsQuery.status]) + }, [pendingExecutionId, deepLinkQuery.data, deepLinkQuery.isError]) useEffect(() => { const timers = refreshTimersRef.current @@ -446,9 +442,7 @@ export default function Logs() { }, []) useEffect(() => { - if (isInitialized.current) { - setStoreSearchQuery(debouncedSearchQuery) - } + setStoreSearchQuery(debouncedSearchQuery) }, [debouncedSearchQuery, setStoreSearchQuery]) const handleLogClick = useCallback((rowId: string) => { @@ -458,7 +452,7 @@ export default function Logs() { const handleNavigateNext = useCallback(() => { const idx = selectedLogIndexRef.current const currentLogs = logsRef.current - if (idx < currentLogs.length - 1) { + if (idx >= 0 && idx < currentLogs.length - 1) { shouldScrollIntoViewRef.current = true dispatch({ type: 'SELECT_LOG', logId: currentLogs[idx + 1].id }) } @@ -484,12 +478,12 @@ export default function Logs() { const handleLogContextMenu = useCallback( (e: React.MouseEvent, rowId: string) => { e.preventDefault() - const log = sortedLogs.find((l) => l.id === 
rowId) ?? null + const log = logs.find((l) => l.id === rowId) ?? null setContextMenuPosition({ x: e.clientX, y: e.clientY }) setContextMenuLog(log) setContextMenuOpen(true) }, - [sortedLogs] + [logs] ) const handleCopyExecutionId = useCallback(() => { @@ -547,7 +541,7 @@ export default function Logs() { }, [contextMenuLog]) const retryLog = useCallback( - async (log: WorkflowLog | null) => { + async (log: WorkflowLogRow | null) => { const workflowId = log?.workflow?.id || log?.workflowId const logId = log?.id if (!workflowId || !logId) return @@ -555,7 +549,7 @@ export default function Logs() { try { const detailLog = await queryClient.fetchQuery({ queryKey: logKeys.detail(logId), - queryFn: ({ signal }) => fetchLogDetail(logId, signal), + queryFn: ({ signal }) => fetchLogDetail(logId, workspaceId, signal), staleTime: 30 * 1000, }) const input = extractRetryInput(detailLog) @@ -600,7 +594,8 @@ export default function Logs() { } }, [selectedLogId, selectedLogIndex]) - const effectiveSidebarOpen = isSidebarOpen && selectedLogIndex !== -1 + const effectiveSidebarOpen = + isSidebarOpen && (selectedLogIndex !== -1 || !!selectedDetailQuery.data) const triggerVisualRefresh = useCallback(() => { setIsVisuallyRefreshing(true) @@ -676,13 +671,6 @@ export default function Logs() { debouncedSearchQuery, ]) - useEffect(() => { - if (!isInitialized.current) { - isInitialized.current = true - initializeFromURL() - } - }, [initializeFromURL]) - useEffect(() => { const handlePopState = () => { initializeFromURL() @@ -695,12 +683,11 @@ export default function Logs() { }, [initializeFromURL]) const loadMoreLogs = useCallback(() => { - if (activeSort) return const { isFetching, hasNextPage, fetchNextPage } = logsQueryRef.current if (!isFetching && hasNextPage) { fetchNextPage() } - }, [activeSort]) + }, []) useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { @@ -753,7 +740,7 @@ export default function Logs() { const rows: ResourceRow[] = useMemo( () => - 
sortedLogs.map((log) => { + logs.map((log) => { const formattedDate = formatDate(log.createdAt) const displayStatus = getDisplayStatus(log.status) const isMothershipJob = log.trigger === 'mothership' @@ -804,7 +791,7 @@ export default function Logs() { }, } }), - [sortedLogs] + [logs] ) const sidebarOverlay = ( @@ -814,7 +801,7 @@ export default function Logs() { onClose={handleCloseSidebar} onNavigateNext={handleNavigateNext} onNavigatePrev={handleNavigatePrev} - hasNext={selectedLogIndex < sortedLogs.length - 1} + hasNext={selectedLogIndex >= 0 && selectedLogIndex < logs.length - 1} hasPrev={selectedLogIndex > 0} onRetryExecution={handleRetrySidebarExecution} isRetryPending={retryExecution.isPending} @@ -1121,7 +1108,7 @@ export default function Logs() { label: 'Export', icon: Download, onClick: handleExport, - disabled: !userPermissions.canEdit || isExporting || sortedLogs.length === 0, + disabled: !userPermissions.canEdit || isExporting || logs.length === 0, }, { label: 'Notifications', @@ -1154,7 +1141,7 @@ export default function Logs() { handleExport, userPermissions.canEdit, isExporting, - sortedLogs.length, + logs.length, handleOpenNotificationSettings, ] ) @@ -1192,7 +1179,7 @@ export default function Logs() { onRowContextMenu={handleLogContextMenu} isLoading={!logsQuery.data} onLoadMore={loadMoreLogs} - hasMore={!activeSort && (logsQuery.hasNextPage ?? false)} + hasMore={logsQuery.hasNextPage ?? 
false} isLoadingMore={logsQuery.isFetchingNextPage} emptyMessage='No logs found' overlay={sidebarOverlay} @@ -1224,10 +1211,10 @@ export default function Logs() { hasActiveFilters={filtersActive} /> - {previewLogId !== null && activeLogQuery.data?.executionId && ( + {previewLogId !== null && previewDetailQuery.data?.executionId && ( - status?: string - error?: unknown -} - -interface BlockExecution { - outputData?: unknown - errorMessage?: string -} - -interface LogWithExecutionData { - executionData?: { - finalOutput?: unknown - traceSpans?: TraceSpan[] - blockExecutions?: BlockExecution[] - output?: unknown - } - output?: string - message?: string -} - -/** - * Extract output from various sources in execution data. - * Checks multiple locations in priority order: - * 1. executionData.finalOutput - * 2. output (as string) - * 3. executionData.traceSpans (iterates through spans) - * 4. executionData.blockExecutions (last block) - * 5. message (fallback) - * @param log - Log object containing execution data - * @returns Extracted output value or null - */ -export function extractOutput(log: LogWithExecutionData): unknown { - let output: unknown = null - - // Check finalOutput first - if (log.executionData?.finalOutput !== undefined) { - output = log.executionData.finalOutput - } - - // Check direct output field - if (typeof log.output === 'string') { - output = log.output - } else if (log.executionData?.traceSpans && Array.isArray(log.executionData.traceSpans)) { - // Search through trace spans - const spans = log.executionData.traceSpans - for (let i = spans.length - 1; i >= 0; i--) { - const s = spans[i] - if (s?.output && Object.keys(s.output).length > 0) { - output = s.output - break - } - const outputWithError = s?.output as Record | undefined - if (s?.status === 'error' && (outputWithError?.error || s?.error)) { - output = outputWithError?.error || s.error - break - } - } - // Fallback to executionData.output - if (!output && log.executionData?.output) { - 
output = log.executionData.output - } - } - - // Check block executions - if (!output) { - const blockExecutions = log.executionData?.blockExecutions - if (Array.isArray(blockExecutions) && blockExecutions.length > 0) { - const lastBlock = blockExecutions[blockExecutions.length - 1] - output = lastBlock?.outputData || lastBlock?.errorMessage || null - } - } - - // Final fallback to message - if (!output) { - output = log.message || null - } - - return output -} - -/** Execution log cost breakdown */ -interface ExecutionCost { - input: number - output: number - total: number -} - -/** Mapped execution log format for UI consumption */ -export interface ExecutionLog { - id: string - executionId: string - startedAt: string - level: string - status: string - trigger: string - triggerUserId: string | null - triggerInputs?: unknown - outputs?: unknown - errorMessage: string | null - duration: number | null - cost: ExecutionCost | null - workflowName?: string - workflowColor?: string - hasPendingPause?: boolean -} - -/** Raw API log response structure */ -interface RawLogResponse extends LogWithDuration, LogWithExecutionData { - id: string - executionId: string - startedAt?: string - endedAt?: string - createdAt?: string - level?: string - status?: string - trigger?: string - triggerUserId?: string | null - error?: string - cost?: { - input?: number - output?: number - total?: number - } - workflowName?: string - workflowColor?: string - workflow?: { - name?: string - color?: string - } - hasPendingPause?: boolean -} - -/** - * Convert raw API log response to ExecutionLog format. - * @param log - Raw log response from API - * @returns Formatted execution log - */ -export function mapToExecutionLog(log: RawLogResponse): ExecutionLog { - const started = log.startedAt - ? new Date(log.startedAt) - : log.endedAt - ? new Date(log.endedAt) - : null - - const startedAt = - started && !Number.isNaN(started.getTime()) ? 
started.toISOString() : new Date().toISOString() - - const duration = parseDuration(log) - const output = extractOutput(log) - - return { - id: log.id, - executionId: log.executionId, - startedAt, - level: log.level || 'info', - status: log.status || 'completed', - trigger: log.trigger || 'manual', - triggerUserId: log.triggerUserId || null, - triggerInputs: undefined, - outputs: output || undefined, - errorMessage: log.error || null, - duration, - cost: log.cost - ? { - input: log.cost.input || 0, - output: log.cost.output || 0, - total: log.cost.total || 0, - } - : null, - workflowName: log.workflowName || log.workflow?.name, - workflowColor: log.workflowColor || log.workflow?.color, - hasPendingPause: log.hasPendingPause === true, - } -} - -/** - * Alternative version that uses createdAt as fallback for startedAt. - * Used in some API responses. - * @param log - Raw log response from API - * @returns Formatted execution log - */ -export function mapToExecutionLogAlt(log: RawLogResponse): ExecutionLog { - const duration = parseDuration(log) - const output = extractOutput(log) - - return { - id: log.id, - executionId: log.executionId, - startedAt: log.createdAt || log.startedAt || new Date().toISOString(), - level: log.level || 'info', - status: log.status || 'completed', - trigger: log.trigger || 'manual', - triggerUserId: log.triggerUserId || null, - triggerInputs: undefined, - outputs: output || undefined, - errorMessage: log.error || null, - duration, - cost: log.cost - ? 
{ - input: log.cost.input || 0, - output: log.cost.output || 0, - total: log.cost.total || 0, - } - : null, - workflowName: log.workflow?.name, - workflowColor: log.workflow?.color, - hasPendingPause: log.hasPendingPause === true, - } -} - /** * Format latency value for display in dashboard UI * @param ms - Latency in milliseconds (number) @@ -449,15 +226,15 @@ export const formatDate = (dateString: string) => { * Prefers the persisted `workflowInput` field (new logs), falls back to * reconstructing from `executionState.blockStates` (old logs). */ -export function extractRetryInput(log: WorkflowLog): unknown | undefined { - const execData = log.executionData as Record | undefined +export function extractRetryInput(log: WorkflowLogDetail): unknown | undefined { + const execData = log.executionData if (!execData) return undefined if (execData.workflowInput !== undefined) { return execData.workflowInput } - const executionState = execData.executionState as + const executionState = (execData as Record).executionState as | { blockStates?: Record< string, diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts index c4b0e1e5e67..411ab163f47 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts @@ -346,7 +346,7 @@ export function useMentionData(props: UseMentionDataProps): MentionDataReturn { try { setIsLoadingLogs(true) const data = await requestJson(listLogsContract, { - query: { workspaceId, limit: 50, details: 'full' }, + query: { workspaceId, limit: 50 }, }) const items = data.data const mapped = 
items.map((l) => ({ diff --git a/apps/sim/hooks/queries/logs.ts b/apps/sim/hooks/queries/logs.ts index d2f4bbfa4e9..00b1aac4985 100644 --- a/apps/sim/hooks/queries/logs.ts +++ b/apps/sim/hooks/queries/logs.ts @@ -15,22 +15,27 @@ import { type ExecutionSnapshotData, getDashboardStatsContract, getExecutionSnapshotContract, + getLogByExecutionIdContract, getLogDetailContract, listLogsContract, type SegmentStats, - type WorkflowLogData, + type WorkflowLogDetail, + type WorkflowLogSummary, type WorkflowStats, } from '@/lib/api/contracts/logs' import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters' import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser' -import type { TimeRange, WorkflowLog } from '@/stores/logs/filters/types' +import type { TimeRange } from '@/stores/logs/filters/types' export type { DashboardStatsResponse, SegmentStats, WorkflowStats } +export type LogSortBy = 'date' | 'duration' | 'cost' | 'status' +export type LogSortOrder = 'asc' | 'desc' + export const logKeys = { all: ['logs'] as const, lists: () => [...logKeys.all, 'list'] as const, - list: (workspaceId: string | undefined, filters: Omit) => + list: (workspaceId: string | undefined, filters: LogFilters) => [...logKeys.lists(), workspaceId ?? '', filters] as const, details: () => [...logKeys.all, 'detail'] as const, detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const, @@ -45,7 +50,7 @@ export const logKeys = { [...logKeys.executionSnapshots(), executionId ?? ''] as const, } -interface LogFilters { +export interface LogFilters { timeRange: TimeRange startDate?: string endDate?: string @@ -55,15 +60,14 @@ interface LogFilters { triggers: string[] searchQuery: string limit: number + sortBy: LogSortBy + sortOrder: LogSortOrder } -const toWorkflowLog = (log: WorkflowLogData): WorkflowLog => log as WorkflowLog - -/** - * Applies common filter parameters to a URLSearchParams object. 
- * Shared between paginated and non-paginated log fetches. - */ -function applyFilterParams(params: URLSearchParams, filters: Omit): void { +function applyFilterParams( + params: URLSearchParams, + filters: Omit +): void { if (filters.level !== 'all') { params.set('level', filters.level) } @@ -100,61 +104,53 @@ function applyFilterParams(params: URLSearchParams, filters: Omit { +): Promise { const apiData = await requestJson(listLogsContract, { - query: buildQueryParams(workspaceId, filters, page), + query: buildListQuery(workspaceId, filters, cursor), signal, }) - const hasMore = apiData.data.length === filters.limit && apiData.page < apiData.totalPages return { - logs: apiData.data.map(toWorkflowLog), - hasMore, - nextPage: hasMore ? page + 1 : undefined, + logs: apiData.data, + nextCursor: apiData.nextCursor, } } -export async function fetchLogDetail(logId: string, signal?: AbortSignal): Promise { - const { data } = await requestJson(getLogDetailContract, { - params: { id: logId }, - signal, - }) - return toWorkflowLog(data) -} - -async function fetchLogByExecutionId( +export async function fetchLogDetail( + logId: string, workspaceId: string, - executionId: string, signal?: AbortSignal -): Promise { - const apiData = await requestJson(listLogsContract, { - query: { - workspaceId, - executionId, - details: 'full', - limit: 1, - }, +): Promise { + const { data } = await requestJson(getLogDetailContract, { + params: { id: logId }, + query: { workspaceId }, signal, }) - return apiData.data?.[0] ? toWorkflowLog(apiData.data[0]) : null + return data } interface UseLogsListOptions { @@ -173,10 +169,10 @@ export function useLogsList( fetchLogsPage(workspaceId as string, filters, pageParam, signal), enabled: Boolean(workspaceId) && (options?.enabled ?? true), refetchInterval: options?.refetchInterval ?? 
false, - staleTime: 0, + staleTime: 30 * 1000, placeholderData: keepPreviousData, - initialPageParam: 1, - getNextPageParam: (lastPage) => lastPage.nextPage, + initialPageParam: null as string | null, + getNextPageParam: (lastPage) => lastPage.nextCursor, }) } @@ -185,14 +181,18 @@ interface UseLogDetailOptions { refetchInterval?: | number | false - | ((query: { state: { data?: WorkflowLog } }) => number | false | undefined) + | ((query: { state: { data?: WorkflowLogDetail } }) => number | false | undefined) } -export function useLogDetail(logId: string | undefined, options?: UseLogDetailOptions) { +export function useLogDetail( + logId: string | undefined, + workspaceId: string | undefined, + options?: UseLogDetailOptions +) { return useQuery({ queryKey: logKeys.detail(logId), - queryFn: ({ signal }) => fetchLogDetail(logId as string, signal), - enabled: Boolean(logId) && (options?.enabled ?? true), + queryFn: ({ signal }) => fetchLogDetail(logId as string, workspaceId as string, signal), + enabled: Boolean(logId) && Boolean(workspaceId) && (options?.enabled ?? true), refetchInterval: options?.refetchInterval ?? false, staleTime: 30 * 1000, retry: (failureCount, err) => @@ -200,42 +200,38 @@ export function useLogDetail(logId: string | undefined, options?: UseLogDetailOp }) } -/** - * Looks up a workflow log by its `executionId` (the id stored on table workflow cells). - * Returns the full log shape so the LogDetails sidebar can render directly without - * an extra detail fetch. - */ export function useLogByExecutionId( workspaceId: string | undefined, executionId: string | null | undefined ) { + const queryClient = useQueryClient() return useQuery({ queryKey: logKeys.byExecution(workspaceId, executionId ?? 
undefined), - queryFn: ({ signal }) => - fetchLogByExecutionId(workspaceId as string, executionId as string, signal), + queryFn: async ({ signal }) => { + const { data } = await requestJson(getLogByExecutionIdContract, { + params: { executionId: executionId as string }, + query: { workspaceId: workspaceId as string }, + signal, + }) + queryClient.setQueryData(logKeys.detail(data.id), data) + return data + }, enabled: Boolean(workspaceId) && Boolean(executionId), staleTime: 30 * 1000, }) } -/** - * Prefetches log detail data on hover for instant panel rendering on click. - */ -export function prefetchLogDetail(queryClient: QueryClient, logId: string) { +export function prefetchLogDetail(queryClient: QueryClient, logId: string, workspaceId: string) { queryClient.prefetchQuery({ queryKey: logKeys.detail(logId), - queryFn: ({ signal }) => fetchLogDetail(logId, signal), + queryFn: ({ signal }) => fetchLogDetail(logId, workspaceId, signal), staleTime: 30 * 1000, }) } -/** - * Fetches dashboard stats from the server-side aggregation endpoint. - * Uses SQL aggregation for efficient computation without arbitrary limits. - */ async function fetchDashboardStats( workspaceId: string, - filters: Omit, + filters: Omit, signal?: AbortSignal ): Promise { const params = new URLSearchParams() @@ -255,13 +251,9 @@ interface UseDashboardStatsOptions { refetchInterval?: number | false } -/** - * Hook for fetching dashboard stats using server-side aggregation. - * No arbitrary limits - uses SQL aggregation for accurate metrics. - */ export function useDashboardStats( workspaceId: string | undefined, - filters: Omit, + filters: Omit, options?: UseDashboardStatsOptions ) { return useQuery({ @@ -269,7 +261,7 @@ export function useDashboardStats( queryFn: ({ signal }) => fetchDashboardStats(workspaceId as string, filters, signal), enabled: Boolean(workspaceId) && (options?.enabled ?? true), refetchInterval: options?.refetchInterval ?? 
false, - staleTime: 0, + staleTime: 30 * 1000, placeholderData: keepPreviousData, }) } @@ -296,12 +288,10 @@ export function useExecutionSnapshot(executionId: string | undefined) { queryKey: logKeys.executionSnapshot(executionId), queryFn: ({ signal }) => fetchExecutionSnapshot(executionId as string, signal), enabled: Boolean(executionId), - staleTime: 5 * 60 * 1000, // 5 minutes - execution snapshots don't change + staleTime: 5 * 60 * 1000, }) } -type LogsPage = { logs: WorkflowLog[]; hasMore: boolean; nextPage: number | undefined } - export function useCancelExecution() { const queryClient = useQueryClient() return useMutation({ @@ -325,29 +315,47 @@ export function useCancelExecution() { queryKey: logKeys.lists(), }) + let affectedLogId: string | null = null queryClient.setQueriesData>({ queryKey: logKeys.lists() }, (old) => { if (!old) return old return { ...old, pages: old.pages.map((page) => ({ ...page, - logs: page.logs.map((log) => - log.executionId === executionId ? { ...log, status: 'cancelling' } : log - ), + logs: page.logs.map((log) => { + if (log.executionId !== executionId) return log + affectedLogId = log.id + return { ...log, status: 'cancelling' } + }), })), } }) - return { previousQueries } + let previousDetail: WorkflowLogDetail | undefined + if (affectedLogId) { + previousDetail = queryClient.getQueryData(logKeys.detail(affectedLogId)) + if (previousDetail) { + queryClient.setQueryData(logKeys.detail(affectedLogId), { + ...previousDetail, + status: 'cancelling', + }) + } + } + + return { previousQueries, affectedLogId, previousDetail } }, onError: (_err, _variables, context) => { for (const [queryKey, data] of context?.previousQueries ?? 
[]) { queryClient.setQueryData(queryKey, data) } + if (context?.affectedLogId && context.previousDetail !== undefined) { + queryClient.setQueryData(logKeys.detail(context.affectedLogId), context.previousDetail) + } }, onSettled: () => { queryClient.invalidateQueries({ queryKey: logKeys.lists() }) queryClient.invalidateQueries({ queryKey: logKeys.details() }) + queryClient.invalidateQueries({ queryKey: logKeys.byExecutionAll() }) queryClient.invalidateQueries({ queryKey: logKeys.stats() }) }, }) @@ -367,9 +375,6 @@ export function useRetryExecution() { const data = await res.json().catch(() => ({})) throw new Error(data.error || 'Failed to retry execution') } - // The ReadableStream is lazy — start() only runs when read. - // Read one chunk to trigger execution, then cancel. Execution continues - // server-side after client disconnect. const reader = res.body?.getReader() if (reader) { await reader.read() @@ -380,6 +385,7 @@ export function useRetryExecution() { onSettled: () => { queryClient.invalidateQueries({ queryKey: logKeys.lists() }) queryClient.invalidateQueries({ queryKey: logKeys.details() }) + queryClient.invalidateQueries({ queryKey: logKeys.byExecutionAll() }) queryClient.invalidateQueries({ queryKey: logKeys.stats() }) }, }) diff --git a/apps/sim/lib/api/contracts/logs.ts b/apps/sim/lib/api/contracts/logs.ts index b0298e349ec..6e94720f91a 100644 --- a/apps/sim/lib/api/contracts/logs.ts +++ b/apps/sim/lib/api/contracts/logs.ts @@ -34,10 +34,18 @@ const logFilterQuerySchema = z.object({ durationValue: z.coerce.number().optional(), }) +export const logSortBySchema = z.enum(['date', 'duration', 'cost', 'status']).default('date') +export const logSortOrderSchema = z.enum(['asc', 'desc']).default('desc') + export const listLogsQuerySchema = logFilterQuerySchema.extend({ - details: z.enum(['basic', 'full']).optional().default('basic'), - limit: z.coerce.number().optional().default(100), - offset: z.coerce.number().optional().default(0), + cursor: 
z.string().optional(), + limit: z.coerce.number().int().min(1).max(200).optional().default(100), + sortBy: logSortBySchema, + sortOrder: logSortOrderSchema, +}) + +export const logDetailQuerySchema = z.object({ + workspaceId: z.string().min(1), }) export const statsQueryParamsSchema = logFilterQuerySchema.extend({ @@ -58,55 +66,196 @@ const workflowSummarySchema = z }) .partial() -const fileSchema = z +const fileSchema = z.object({ + id: z.string(), + name: z.string(), + size: z.number(), + type: z.string(), + url: z.string(), + key: z.string(), + uploadedAt: z.string(), + expiresAt: z.string(), + storageProvider: z.enum(['s3', 'blob', 'local']).optional(), + bucketName: z.string().optional(), +}) + +const tokenBreakdownSchema = z .object({ - id: z.string(), - name: z.string(), - size: z.number(), - type: z.string(), - url: z.string(), - key: z.string(), - uploadedAt: z.string(), - expiresAt: z.string(), - storageProvider: z.enum(['s3', 'blob', 'local']).optional(), - bucketName: z.string().optional(), + total: z.number().optional(), + input: z.number().optional(), + output: z.number().optional(), + prompt: z.number().optional(), + completion: z.number().optional(), + }) + .partial() + +const modelCostSchema = z + .object({ + input: z.number().optional(), + output: z.number().optional(), + total: z.number().optional(), + tokens: tokenBreakdownSchema.optional(), + }) + .partial() + +const costSummarySchema = z + .object({ + total: z.number().optional(), + input: z.number().optional(), + output: z.number().optional(), + tokens: tokenBreakdownSchema.optional(), + models: z.record(z.string(), modelCostSchema).optional(), + pricing: z + .object({ + input: z.number(), + output: z.number(), + cachedInput: z.number().optional(), + updatedAt: z.string(), + }) + .optional(), + }) + .partial() + +const pauseSummarySchema = z.object({ + status: z.string().nullable(), + total: z.number(), + resumed: z.number(), +}) + +const blockExecutionSchema = z.object({ + id: z.string(), + 
blockId: z.string(), + blockName: z.string(), + blockType: z.string(), + startedAt: z.string(), + endedAt: z.string(), + durationMs: z.number(), + status: z.enum(['success', 'error', 'skipped']), + errorMessage: z.string().optional(), + errorStackTrace: z.string().optional(), + inputData: z.unknown(), + outputData: z.unknown(), + cost: costSummarySchema.optional(), + metadata: z.record(z.string(), z.unknown()).optional(), +}) + +const toolCallSchema = z + .object({ + id: z.string().optional(), + name: z.string().optional(), + arguments: z.unknown().optional(), + result: z.unknown().optional(), + error: z.string().optional(), + startTime: z.string().optional(), + endTime: z.string().optional(), + duration: z.number().optional(), }) .passthrough() -export const workflowLogSchema = z +type TraceSpan = { + id: string + name: string + type: string + duration?: number + durationMs?: number + startTime?: string + endTime?: string + status?: string + blockId?: string + input?: unknown + output?: unknown + tokens?: number | { total?: number; input?: number; output?: number } + relativeStartMs?: number + toolCalls?: Array> + children?: TraceSpan[] +} + +const traceSpanSchema: z.ZodType = z.lazy(() => + z + .object({ + id: z.string(), + name: z.string(), + type: z.string(), + duration: z.number().optional(), + durationMs: z.number().optional(), + startTime: z.string().optional(), + endTime: z.string().optional(), + status: z.string().optional(), + blockId: z.string().optional(), + input: z.unknown().optional(), + output: z.unknown().optional(), + tokens: z + .union([ + z.number(), + z + .object({ + total: z.number().optional(), + input: z.number().optional(), + output: z.number().optional(), + }) + .partial(), + ]) + .optional(), + relativeStartMs: z.number().optional(), + toolCalls: z.array(toolCallSchema).optional(), + children: z.array(traceSpanSchema).optional(), + }) + .passthrough() +) + +const executionDataDetailSchema = z .object({ - id: z.string(), - workflowId: 
z.string().nullable(), - executionId: z.string().nullable().optional(), - deploymentVersionId: z.string().nullable().optional(), - deploymentVersion: z.number().nullable().optional(), - deploymentVersionName: z.string().nullable().optional(), - level: z.string(), - status: z.string().nullable().optional(), - duration: z.string().nullable(), - trigger: z.string().nullable(), - createdAt: z.string(), - workflow: workflowSummarySchema.nullable().optional(), - jobTitle: z.string().nullable().optional(), - files: z.array(fileSchema).optional(), - cost: z.unknown().optional(), - hasPendingPause: z.boolean().nullable().optional(), - pauseSummary: z.unknown().optional(), - executionData: z.unknown().optional(), + totalDuration: z.number().nullable().optional(), + enhanced: z.literal(true).optional(), + traceSpans: z.array(traceSpanSchema).optional(), + blockExecutions: z.array(blockExecutionSchema).optional(), + finalOutput: z.unknown().optional(), + workflowInput: z.unknown().optional(), + blockInput: z.record(z.string(), z.unknown()).optional(), + trigger: z.unknown().optional(), }) .passthrough() -export type WorkflowLogData = z.output +export const workflowLogSummarySchema = z.object({ + id: z.string(), + workflowId: z.string().nullable(), + executionId: z.string().nullable(), + deploymentVersionId: z.string().nullable(), + deploymentVersion: z.number().nullable(), + deploymentVersionName: z.string().nullable(), + level: z.string(), + status: z.string().nullable(), + duration: z.string().nullable(), + trigger: z.string().nullable(), + createdAt: z.string(), + workflow: workflowSummarySchema.nullable(), + jobTitle: z.string().nullable(), + cost: costSummarySchema.nullable(), + pauseSummary: pauseSummarySchema, + hasPendingPause: z.boolean(), +}) -export const logsResponseSchema = z.object({ - data: z.array(workflowLogSchema), - total: z.number(), - page: z.number(), - pageSize: z.number(), - totalPages: z.number(), +export const workflowLogDetailSchema = 
workflowLogSummarySchema.extend({ + executionData: executionDataDetailSchema, + files: z.array(fileSchema).nullable(), }) -export type LogsResponse = z.output +export type WorkflowLogSummary = z.output +export type WorkflowLogDetail = z.output + +/** + * A row that may be either a list-view summary or a fully loaded detail. Used by + * UI surfaces that render the same log before and after its detail query resolves. + */ +export type WorkflowLogRow = WorkflowLogSummary & + Partial> + +export const listLogsResponseSchema = z.object({ + data: z.array(workflowLogSummarySchema), + nextCursor: z.string().nullable(), +}) + +export type ListLogsResponse = z.output export const segmentStatsSchema = z.object({ timestamp: z.string(), @@ -179,7 +328,7 @@ export const listLogsContract = defineRouteContract({ query: listLogsQuerySchema, response: { mode: 'json', - schema: logsResponseSchema, + schema: listLogsResponseSchema, }, }) @@ -187,10 +336,24 @@ export const getLogDetailContract = defineRouteContract({ method: 'GET', path: '/api/logs/[id]', params: logIdParamsSchema, + query: logDetailQuerySchema, + response: { + mode: 'json', + schema: z.object({ + data: workflowLogDetailSchema, + }), + }, +}) + +export const getLogByExecutionIdContract = defineRouteContract({ + method: 'GET', + path: '/api/logs/by-execution/[executionId]', + params: executionIdParamsSchema, + query: logDetailQuerySchema, response: { mode: 'json', schema: z.object({ - data: workflowLogSchema, + data: workflowLogDetailSchema, }), }, }) diff --git a/apps/sim/lib/logs/fetch-log-detail.ts b/apps/sim/lib/logs/fetch-log-detail.ts new file mode 100644 index 00000000000..1a5aea4dc26 --- /dev/null +++ b/apps/sim/lib/logs/fetch-log-detail.ts @@ -0,0 +1,197 @@ +import { db } from '@sim/db' +import { + jobExecutionLogs, + pausedExecutions, + permissions, + workflow, + workflowDeploymentVersion, + workflowExecutionLogs, +} from '@sim/db/schema' +import { and, eq, type SQL } from 'drizzle-orm' + +type LookupColumn = 
'id' | 'executionId' + +interface FetchLogDetailArgs { + userId: string + workspaceId: string + lookupColumn: LookupColumn + lookupValue: string +} + +/** + * Shared loader for the workflow-log detail shape returned by the by-id and + * by-execution routes. Returns `null` when no matching row exists in either + * the workflow-execution or job-execution tables for this user + workspace. + */ +export async function fetchLogDetail({ + userId, + workspaceId, + lookupColumn, + lookupValue, +}: FetchLogDetailArgs) { + const workflowMatch: SQL = + lookupColumn === 'id' + ? eq(workflowExecutionLogs.id, lookupValue) + : eq(workflowExecutionLogs.executionId, lookupValue) + + const rows = await db + .select({ + id: workflowExecutionLogs.id, + workflowId: workflowExecutionLogs.workflowId, + executionId: workflowExecutionLogs.executionId, + deploymentVersionId: workflowExecutionLogs.deploymentVersionId, + level: workflowExecutionLogs.level, + status: workflowExecutionLogs.status, + trigger: workflowExecutionLogs.trigger, + startedAt: workflowExecutionLogs.startedAt, + endedAt: workflowExecutionLogs.endedAt, + totalDurationMs: workflowExecutionLogs.totalDurationMs, + executionData: workflowExecutionLogs.executionData, + cost: workflowExecutionLogs.cost, + files: workflowExecutionLogs.files, + createdAt: workflowExecutionLogs.createdAt, + workflowName: workflow.name, + workflowDescription: workflow.description, + workflowColor: workflow.color, + workflowFolderId: workflow.folderId, + workflowUserId: workflow.userId, + workflowWorkspaceId: workflow.workspaceId, + workflowCreatedAt: workflow.createdAt, + workflowUpdatedAt: workflow.updatedAt, + deploymentVersion: workflowDeploymentVersion.version, + deploymentVersionName: workflowDeploymentVersion.name, + pausedStatus: pausedExecutions.status, + pausedTotalPauseCount: pausedExecutions.totalPauseCount, + pausedResumedCount: pausedExecutions.resumedCount, + }) + .from(workflowExecutionLogs) + .leftJoin(workflow, 
eq(workflowExecutionLogs.workflowId, workflow.id)) + .leftJoin( + workflowDeploymentVersion, + eq(workflowDeploymentVersion.id, workflowExecutionLogs.deploymentVersionId) + ) + .leftJoin(pausedExecutions, eq(pausedExecutions.executionId, workflowExecutionLogs.executionId)) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), + eq(permissions.userId, userId) + ) + ) + .where(and(workflowMatch, eq(workflowExecutionLogs.workspaceId, workspaceId))) + .limit(1) + + const log = rows[0] + + if (log) { + const workflowSummary = log.workflowId + ? { + id: log.workflowId, + name: log.workflowName, + description: log.workflowDescription, + color: log.workflowColor, + folderId: log.workflowFolderId, + userId: log.workflowUserId, + workspaceId: log.workflowWorkspaceId, + createdAt: log.workflowCreatedAt?.toISOString() ?? null, + updatedAt: log.workflowUpdatedAt?.toISOString() ?? null, + } + : null + + const totalPauseCount = Number(log.pausedTotalPauseCount ?? 0) + const resumedCount = Number(log.pausedResumedCount ?? 0) + const hasPendingPause = + (totalPauseCount > 0 && resumedCount < totalPauseCount) || + (log.pausedStatus !== null && log.pausedStatus !== 'fully_resumed') + + return { + id: log.id, + workflowId: log.workflowId, + executionId: log.executionId, + deploymentVersionId: log.deploymentVersionId, + deploymentVersion: log.deploymentVersion ?? null, + deploymentVersionName: log.deploymentVersionName ?? null, + level: log.level, + status: log.status, + duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null, + trigger: log.trigger, + createdAt: log.startedAt.toISOString(), + workflow: workflowSummary, + jobTitle: null, + cost: log.cost ?? null, + pauseSummary: { + status: log.pausedStatus ?? 
null, + total: totalPauseCount, + resumed: resumedCount, + }, + hasPendingPause, + executionData: { + totalDuration: log.totalDurationMs, + ...((log.executionData as Record | null) ?? {}), + enhanced: true as const, + }, + files: log.files ?? null, + } + } + + const jobMatch: SQL = + lookupColumn === 'id' + ? eq(jobExecutionLogs.id, lookupValue) + : eq(jobExecutionLogs.executionId, lookupValue) + + const jobRows = await db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + level: jobExecutionLogs.level, + status: jobExecutionLogs.status, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + executionData: jobExecutionLogs.executionData, + cost: jobExecutionLogs.cost, + createdAt: jobExecutionLogs.createdAt, + }) + .from(jobExecutionLogs) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, jobExecutionLogs.workspaceId), + eq(permissions.userId, userId) + ) + ) + .where(and(jobMatch, eq(jobExecutionLogs.workspaceId, workspaceId))) + .limit(1) + + const jobLog = jobRows[0] + if (!jobLog) return null + + const execData = (jobLog.executionData as Record | null) ?? {} + return { + id: jobLog.id, + workflowId: null, + executionId: jobLog.executionId, + deploymentVersionId: null, + deploymentVersion: null, + deploymentVersionName: null, + level: jobLog.level, + status: jobLog.status, + duration: jobLog.totalDurationMs ? `${jobLog.totalDurationMs}ms` : null, + trigger: jobLog.trigger, + createdAt: jobLog.startedAt.toISOString(), + workflow: null, + jobTitle: ((execData.trigger as Record | undefined)?.source as string) ?? null, + cost: jobLog.cost ?? 
null, + pauseSummary: { status: null, total: 0, resumed: 0 }, + hasPendingPause: false, + executionData: { + totalDuration: jobLog.totalDurationMs, + ...execData, + enhanced: true as const, + }, + files: null, + } +} diff --git a/apps/sim/stores/logs/filters/types.ts b/apps/sim/stores/logs/filters/types.ts index 3fbd85bfaee..cf95d3bee3e 100644 --- a/apps/sim/stores/logs/filters/types.ts +++ b/apps/sim/stores/logs/filters/types.ts @@ -1,113 +1,3 @@ -import type { ProviderTiming, TokenInfo, ToolCall, TraceSpan } from '@/lib/logs/types' - -export type { ProviderTiming, TokenInfo, ToolCall, TraceSpan } - -export interface WorkflowData { - id: string - name: string - description: string | null - color: string - state: any -} - -export interface ToolCallMetadata { - toolCalls?: ToolCall[] -} - -export interface CostMetadata { - models?: Record< - string, - { - input: number - output: number - total: number - tokens?: { - input?: number - output?: number - prompt?: number - completion?: number - total?: number - } - } - > - input?: number - output?: number - total?: number - tokens?: { - input?: number - output?: number - prompt?: number - completion?: number - total?: number - } - pricing?: { - input: number - output: number - cachedInput?: number - updatedAt: string - } -} - -export interface WorkflowLog { - id: string - workflowId: string | null - executionId?: string | null - deploymentVersion?: number | null - deploymentVersionName?: string | null - level: string - status?: string | null - duration: string | null - trigger: string | null - createdAt: string - workflow?: WorkflowData | null - jobTitle?: string | null - files?: Array<{ - id: string - name: string - size: number - type: string - url: string - key: string - uploadedAt: string - expiresAt: string - storageProvider?: 's3' | 'blob' | 'local' - bucketName?: string - }> - cost?: CostMetadata - hasPendingPause?: boolean - executionData?: ToolCallMetadata & { - traceSpans?: TraceSpan[] - totalDuration?: number 
- blockInput?: Record - enhanced?: boolean - - blockExecutions?: Array<{ - id: string - blockId: string - blockName: string - blockType: string - startedAt: string - endedAt: string - durationMs: number - status: 'success' | 'error' | 'skipped' - errorMessage?: string - errorStackTrace?: string - inputData: unknown - outputData: unknown - cost?: CostMetadata - metadata: Record - }> - } -} - -export interface LogsResponse { - data: WorkflowLog[] - total: number - page: number - pageSize: number - totalPages: number -} - export type TimeRange = | 'Past 30 minutes' | 'Past hour' @@ -129,6 +19,7 @@ export type LogLevel = | 'cancelled' | 'all' | (string & {}) + /** Core trigger types for workflow execution */ export const CORE_TRIGGER_TYPES = [ 'manual', diff --git a/scripts/check-api-validation-contracts.ts b/scripts/check-api-validation-contracts.ts index 34cbacb0f6e..14a57e05fad 100644 --- a/scripts/check-api-validation-contracts.ts +++ b/scripts/check-api-validation-contracts.ts @@ -9,8 +9,8 @@ const QUERY_HOOKS_DIR = path.join(ROOT, 'apps/sim/hooks/queries') const SELECTOR_HOOKS_DIR = path.join(ROOT, 'apps/sim/hooks/selectors') const BASELINE = { - totalRoutes: 725, - zodRoutes: 725, + totalRoutes: 726, + zodRoutes: 726, nonZodRoutes: 0, } as const From 1dc6f7dd09082480bbbe071c91c847d806e657b9 Mon Sep 17 00:00:00 2001 From: Alan Daniel Date: Mon, 4 May 2026 19:28:18 -0400 Subject: [PATCH 11/15] fix: double wrap reponse of guest session handler (#4438) * v0.6.29: login improvements, posthog telemetry (#4026) * feat(posthog): Add tracking on mothership abort (#4023) Co-authored-by: Theodore Li * fix(login): fix captcha headers for manual login (#4025) * fix(signup): fix turnstile key loading * fix(login): fix captcha header passing * Catch user already exists, remove login form captcha * fix double wrap reponse of guest session handler * remove dead code, and fix test --------- Co-authored-by: Waleed Co-authored-by: Theodore Li Co-authored-by: Siddharth Ganesan 
<33737564+Sg312@users.noreply.github.com> Co-authored-by: Vikhyath Mondreti Co-authored-by: Theodore Li --- apps/sim/app/api/auth/[...all]/route.test.ts | 16 ++++++---------- apps/sim/app/api/auth/[...all]/route.ts | 4 ++-- apps/sim/lib/auth/anonymous.ts | 4 ---- 3 files changed, 8 insertions(+), 16 deletions(-) diff --git a/apps/sim/app/api/auth/[...all]/route.test.ts b/apps/sim/app/api/auth/[...all]/route.test.ts index d9aa74cab91..f87f1a01673 100644 --- a/apps/sim/app/api/auth/[...all]/route.test.ts +++ b/apps/sim/app/api/auth/[...all]/route.test.ts @@ -8,11 +8,9 @@ const handlerMocks = vi.hoisted(() => ({ betterAuthGET: vi.fn(), betterAuthPOST: vi.fn(), ensureAnonymousUserExists: vi.fn(), - createAnonymousGetSessionResponse: vi.fn(() => ({ - data: { - user: { id: 'anon' }, - session: { id: 'anon-session' }, - }, + createAnonymousSession: vi.fn(() => ({ + user: { id: 'anon' }, + session: { id: 'anon-session' }, })), isAuthDisabled: false, })) @@ -30,7 +28,7 @@ vi.mock('@/lib/auth', () => ({ vi.mock('@/lib/auth/anonymous', () => ({ ensureAnonymousUserExists: handlerMocks.ensureAnonymousUserExists, - createAnonymousGetSessionResponse: handlerMocks.createAnonymousGetSessionResponse, + createAnonymousSession: handlerMocks.createAnonymousSession, })) vi.mock('@/lib/core/config/feature-flags', () => ({ @@ -63,10 +61,8 @@ describe('auth catch-all route (DISABLE_AUTH get-session)', () => { expect(handlerMocks.ensureAnonymousUserExists).toHaveBeenCalledTimes(1) expect(handlerMocks.betterAuthGET).not.toHaveBeenCalled() expect(json).toEqual({ - data: { - user: { id: 'anon' }, - session: { id: 'anon-session' }, - }, + user: { id: 'anon' }, + session: { id: 'anon-session' }, }) }) diff --git a/apps/sim/app/api/auth/[...all]/route.ts b/apps/sim/app/api/auth/[...all]/route.ts index b09ce7e7e67..6ff9bfd6db2 100644 --- a/apps/sim/app/api/auth/[...all]/route.ts +++ b/apps/sim/app/api/auth/[...all]/route.ts @@ -1,7 +1,7 @@ import { toNextJsHandler } from 'better-auth/next-js' 
import { type NextRequest, NextResponse } from 'next/server' import { auth } from '@/lib/auth' -import { createAnonymousGetSessionResponse, ensureAnonymousUserExists } from '@/lib/auth/anonymous' +import { createAnonymousSession, ensureAnonymousUserExists } from '@/lib/auth/anonymous' import { isAuthDisabled } from '@/lib/core/config/feature-flags' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' @@ -24,7 +24,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => { if (path === 'get-session' && isAuthDisabled) { await ensureAnonymousUserExists() - return NextResponse.json(createAnonymousGetSessionResponse()) + return NextResponse.json(createAnonymousSession()) } return betterAuthGET(request) diff --git a/apps/sim/lib/auth/anonymous.ts b/apps/sim/lib/auth/anonymous.ts index 839e65487ec..7504ee7fd62 100644 --- a/apps/sim/lib/auth/anonymous.ts +++ b/apps/sim/lib/auth/anonymous.ts @@ -103,7 +103,3 @@ export function createAnonymousSession(): AnonymousSession { }, } } - -export function createAnonymousGetSessionResponse(): { data: AnonymousSession } { - return { data: createAnonymousSession() } -} From 9eeb1b2cdb7e731423beaa66e1055d1e91d958ae Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Mon, 4 May 2026 16:52:43 -0700 Subject: [PATCH 12/15] improvement(mothership): streaming state transitions (#4439) * improvement(mothership): improve streaming state transitions * address comments --- .../app/api/copilot/chat/stop/route.test.ts | 11 + apps/sim/app/api/copilot/chat/stop/route.ts | 1 + apps/sim/app/api/copilot/chat/stream/route.ts | 1 + apps/sim/app/api/mothership/events/route.ts | 1 + .../[workspaceId]/home/hooks/use-chat.ts | 1917 +++++++++++++---- apps/sim/hooks/use-task-events.test.ts | 358 ++- apps/sim/hooks/use-task-events.ts | 80 +- apps/sim/lib/copilot/chat/post.ts | 3 + apps/sim/lib/copilot/tasks.ts | 1 + apps/sim/lib/mothership/inbox/executor.ts | 5 +- 10 files changed, 1931 insertions(+), 447 deletions(-) diff 
--git a/apps/sim/app/api/copilot/chat/stop/route.test.ts b/apps/sim/app/api/copilot/chat/stop/route.test.ts index 0ac05257bf6..a87f35a2987 100644 --- a/apps/sim/app/api/copilot/chat/stop/route.test.ts +++ b/apps/sim/app/api/copilot/chat/stop/route.test.ts @@ -29,6 +29,16 @@ const { mockSql: vi.fn((strings: TemplateStringsArray, ...values: unknown[]) => ({ strings, values })), })) +vi.mock('@sim/db/schema', () => ({ + copilotChats: { + id: 'copilotChats.id', + userId: 'copilotChats.userId', + workspaceId: 'copilotChats.workspaceId', + messages: 'copilotChats.messages', + conversationId: 'copilotChats.conversationId', + }, +})) + vi.mock('@sim/db', () => ({ db: { select: mockSelect, @@ -140,6 +150,7 @@ describe('copilot chat stop route', () => { workspaceId: 'ws-1', chatId: 'chat-1', type: 'completed', + streamId: 'stream-1', }) }) }) diff --git a/apps/sim/app/api/copilot/chat/stop/route.ts b/apps/sim/app/api/copilot/chat/stop/route.ts index ad7da7386d1..36d3b8ae43d 100644 --- a/apps/sim/app/api/copilot/chat/stop/route.ts +++ b/apps/sim/app/api/copilot/chat/stop/route.ts @@ -111,6 +111,7 @@ export const POST = withRouteHandler((req: NextRequest) => workspaceId: updated.workspaceId, chatId, type: 'completed', + streamId, }) } diff --git a/apps/sim/app/api/copilot/chat/stream/route.ts b/apps/sim/app/api/copilot/chat/stream/route.ts index 04ba6109a56..7cc61cb6447 100644 --- a/apps/sim/app/api/copilot/chat/stream/route.ts +++ b/apps/sim/app/api/copilot/chat/stream/route.ts @@ -248,6 +248,7 @@ async function handleResumeRequestBody({ events: batchEvents, previewSessions, status: run.status, + ...(run.chatId ? 
{ chatId: run.chatId } : {}), }) } diff --git a/apps/sim/app/api/mothership/events/route.ts b/apps/sim/app/api/mothership/events/route.ts index 420f2be3fdb..bb3e1f278c8 100644 --- a/apps/sim/app/api/mothership/events/route.ts +++ b/apps/sim/app/api/mothership/events/route.ts @@ -27,6 +27,7 @@ const mothershipEventsHandler = createWorkspaceSSE({ send('task_status', { chatId: event.chatId, type: event.type, + ...(event.streamId ? { streamId: event.streamId } : {}), timestamp: Date.now(), }) }) diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts index d20ce7ac805..6d2c7527a6b 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts +++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts @@ -188,8 +188,16 @@ const RECONNECT_TAIL_ERROR = const MAX_RECONNECT_ATTEMPTS = 10 const RECONNECT_BASE_DELAY_MS = 1000 const RECONNECT_MAX_DELAY_MS = 30_000 +const STREAM_BATCH_FETCH_TIMEOUT_MS = 10_000 +const STREAM_CHAT_ID_RESOLVE_TIMEOUT_MS = 10_000 +const CHAT_HISTORY_RECOVERY_TIMEOUT_MS = 10_000 +const STOP_REQUEST_TIMEOUT_MS = 15_000 const QUEUED_SEND_HANDOFF_STORAGE_KEY = `${STREAM_STORAGE_KEY}:queued-send-handoff` const QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY = `${STREAM_STORAGE_KEY}:queued-send-handoff-claim` +const QUEUED_SEND_HANDOFF_TTL_MS = 5 * 60 * 1000 +const QUEUED_SEND_HANDOFF_CLAIM_TTL_MS = 30_000 +const QUEUED_SEND_HANDOFF_RETRY_BASE_MS = 1000 +const QUEUED_SEND_HANDOFF_RETRY_MAX_MS = 30_000 const logger = createLogger('useChat') @@ -208,7 +216,7 @@ type ActiveTurn = { interface QueuedSendHandoffState { id: string - chatId: string + chatId?: string workspaceId: string supersededStreamId: string | null userMessageId: string @@ -216,15 +224,152 @@ interface QueuedSendHandoffState { fileAttachments?: FileAttachmentForApi[] contexts?: ChatContext[] requestedAt: number + resolveAttempts?: number } interface QueuedSendHandoffSeed { id: string - chatId: string + 
chatId?: string supersededStreamId: string | null userMessageId?: string } +type QueuedChatMessage = QueuedMessage & { + queuedSendHandoff?: QueuedSendHandoffSeed +} + +interface QueuedSendHandoffClaim { + id: string + ownerId: string + claimedAt: number +} + +interface ActiveQueuedSendHandoffRecovery { + id: string + ownerId: string +} + +function createTimeoutSignal(ms: number): AbortSignal | undefined { + if (typeof AbortSignal !== 'undefined' && typeof AbortSignal.timeout === 'function') { + return AbortSignal.timeout(ms) + } + if (typeof AbortController === 'undefined') return undefined + + const controller = new AbortController() + const timeout = setTimeout(() => { + controller.abort(new Error(`Operation timed out after ${ms}ms`)) + }, ms) + controller.signal.addEventListener('abort', () => clearTimeout(timeout), { once: true }) + return controller.signal +} + +function combineAbortSignals(...signals: (AbortSignal | undefined)[]): AbortSignal | undefined { + const activeSignals = signals.filter((signal): signal is AbortSignal => Boolean(signal)) + if (activeSignals.length === 0) return undefined + if (activeSignals.length === 1) return activeSignals[0] + if (typeof AbortSignal !== 'undefined' && typeof AbortSignal.any === 'function') { + return AbortSignal.any(activeSignals) + } + if (typeof AbortController === 'undefined') return activeSignals[0] + + const controller = new AbortController() + const abortFromSource = (source: AbortSignal) => { + cleanup() + controller.abort(source.reason) + } + const listeners = activeSignals.map((signal) => { + const listener = () => abortFromSource(signal) + signal.addEventListener('abort', listener, { once: true }) + return { signal, listener } + }) + function cleanup() { + for (const { signal, listener } of listeners) { + signal.removeEventListener('abort', listener) + } + } + for (const signal of activeSignals) { + if (signal.aborted) { + abortFromSource(signal) + break + } + } + 
controller.signal.addEventListener('abort', cleanup, { once: true }) + return controller.signal +} + +function createAbortError(signal: AbortSignal): Error { + const error = new Error(signal.reason ? String(signal.reason) : 'Operation aborted') + error.name = 'AbortError' + return error +} + +async function sleepWithAbort(ms: number, signal?: AbortSignal) { + if (!signal) { + await sleep(ms) + return + } + if (signal.aborted) throw createAbortError(signal) + + let cleanup: (() => void) | undefined + await Promise.race([ + sleep(ms), + new Promise((_, reject) => { + const onAbort = () => reject(createAbortError(signal)) + cleanup = () => signal.removeEventListener('abort', onAbort) + signal.addEventListener('abort', onAbort, { once: true }) + }), + ]).finally(() => cleanup?.()) +} + +function isFileAttachmentForApi(value: unknown): value is FileAttachmentForApi { + if (!isRecord(value)) return false + return ( + typeof value.id === 'string' && + typeof value.key === 'string' && + typeof value.filename === 'string' && + typeof value.media_type === 'string' && + typeof value.size === 'number' && + Number.isFinite(value.size) && + (value.path === undefined || typeof value.path === 'string') + ) +} + +function isChatContext(value: unknown): value is ChatContext { + if (!isRecord(value) || typeof value.kind !== 'string' || typeof value.label !== 'string') { + return false + } + + switch (value.kind) { + case 'past_chat': + return typeof value.chatId === 'string' + case 'workflow': + case 'current_workflow': + return typeof value.workflowId === 'string' + case 'blocks': + return Array.isArray(value.blockIds) && value.blockIds.every((id) => typeof id === 'string') + case 'logs': + return value.executionId === undefined || typeof value.executionId === 'string' + case 'workflow_block': + return typeof value.workflowId === 'string' && typeof value.blockId === 'string' + case 'knowledge': + return value.knowledgeId === undefined || typeof value.knowledgeId === 'string' + case 
'table': + return typeof value.tableId === 'string' + case 'file': + return typeof value.fileId === 'string' + case 'folder': + return typeof value.folderId === 'string' + case 'templates': + return value.templateId === undefined || typeof value.templateId === 'string' + case 'docs': + return true + case 'slash_command': + return typeof value.command === 'string' + default: + return false + } +} + function readQueuedSendHandoffState(): QueuedSendHandoffState | null { if (typeof window === 'undefined') return null @@ -233,30 +378,46 @@ function readQueuedSendHandoffState(): QueuedSendHandoffState | null { if (!raw) return null const parsed = JSON.parse(raw) as Partial + const chatId = typeof parsed.chatId === 'string' ? parsed.chatId : undefined + const supersededStreamId = + typeof parsed.supersededStreamId === 'string' ? parsed.supersededStreamId : null if ( typeof parsed?.id !== 'string' || - typeof parsed.chatId !== 'string' || typeof parsed.workspaceId !== 'string' || typeof parsed.userMessageId !== 'string' || typeof parsed.message !== 'string' || - typeof parsed.requestedAt !== 'number' + typeof parsed.requestedAt !== 'number' || + (!chatId && !supersededStreamId) ) { return null } + if (Date.now() - parsed.requestedAt > QUEUED_SEND_HANDOFF_TTL_MS) { + window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_STORAGE_KEY) + if (readQueuedSendHandoffClaim() === parsed.id) { + window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + } + return null + } return { id: parsed.id, - chatId: parsed.chatId, + ...(chatId ? { chatId } : {}), workspaceId: parsed.workspaceId, - supersededStreamId: - typeof parsed.supersededStreamId === 'string' ? parsed.supersededStreamId : null, + supersededStreamId, userMessageId: parsed.userMessageId, message: parsed.message, ...(Array.isArray(parsed.fileAttachments) - ? { fileAttachments: parsed.fileAttachments as FileAttachmentForApi[] } + ? 
{ fileAttachments: parsed.fileAttachments.filter(isFileAttachmentForApi) } + : {}), + ...(Array.isArray(parsed.contexts) + ? { contexts: parsed.contexts.filter(isChatContext) } : {}), - ...(Array.isArray(parsed.contexts) ? { contexts: parsed.contexts as ChatContext[] } : {}), requestedAt: parsed.requestedAt, + ...(typeof parsed.resolveAttempts === 'number' && + Number.isFinite(parsed.resolveAttempts) && + parsed.resolveAttempts > 0 + ? { resolveAttempts: parsed.resolveAttempts } + : {}), } } catch { return null @@ -279,21 +440,73 @@ function clearQueuedSendHandoffState(expectedId?: string) { window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_STORAGE_KEY) } -function readQueuedSendHandoffClaim(): string | null { +function readQueuedSendHandoffClaimState(): QueuedSendHandoffClaim | null { if (typeof window === 'undefined') return null - return window.sessionStorage.getItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + const raw = window.sessionStorage.getItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + if (!raw) return null + + try { + const parsed = JSON.parse(raw) as Partial + if ( + typeof parsed?.id !== 'string' || + typeof parsed.ownerId !== 'string' || + typeof parsed.claimedAt !== 'number' + ) { + window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + return null + } + if (Date.now() - parsed.claimedAt > QUEUED_SEND_HANDOFF_CLAIM_TTL_MS) { + window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + return null + } + return { id: parsed.id, ownerId: parsed.ownerId, claimedAt: parsed.claimedAt } + } catch { + window.sessionStorage.removeItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY) + return null + } } -function writeQueuedSendHandoffClaim(id: string) { - if (typeof window === 'undefined') return - window.sessionStorage.setItem(QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY, id) +function readQueuedSendHandoffClaim(): string | null { + return readQueuedSendHandoffClaimState()?.id ?? 
null +} + +function hasQueuedSendHandoffClaimOwner(id: string, ownerId: string): boolean { + const claim = readQueuedSendHandoffClaimState() + return claim?.id === id && claim.ownerId === ownerId +} + +function queuedSendHandoffClaimRetryDelay(id: string): number | null { + const claim = readQueuedSendHandoffClaimState() + if (!claim || claim.id !== id) return null + const elapsed = Date.now() - claim.claimedAt + return Math.max(0, QUEUED_SEND_HANDOFF_CLAIM_TTL_MS - elapsed + 1) +} + +function queuedSendHandoffResolveRetryDelay(resolveAttempts: number): number { + return Math.min( + QUEUED_SEND_HANDOFF_RETRY_MAX_MS, + QUEUED_SEND_HANDOFF_RETRY_BASE_MS * 2 ** Math.max(0, resolveAttempts - 1) + ) +} + +function writeQueuedSendHandoffClaim(id: string): string { + const ownerId = generateId() + if (typeof window === 'undefined') return ownerId + window.sessionStorage.setItem( + QUEUED_SEND_HANDOFF_CLAIM_STORAGE_KEY, + JSON.stringify({ id, ownerId, claimedAt: Date.now() } satisfies QueuedSendHandoffClaim) + ) + return ownerId } -function clearQueuedSendHandoffClaim(expectedId?: string) { +function clearQueuedSendHandoffClaim(expectedId?: string, expectedOwnerId?: string) { if (typeof window === 'undefined') return if (expectedId) { - const current = readQueuedSendHandoffClaim() - if (current && current !== expectedId) { + const current = readQueuedSendHandoffClaimState() + if ( + current && + (current.id !== expectedId || (expectedOwnerId && current.ownerId !== expectedOwnerId)) + ) { return } } @@ -619,6 +832,7 @@ type StreamBatchResponse = { events: StreamBatchEvent[] previewSessions?: FilePreviewSession[] status: string + chatId?: string } function isRecord(value: unknown): value is Record { @@ -703,7 +917,25 @@ function parseStreamBatchResponse(value: unknown): StreamBatchResponse { events, ...(previewSessions ? { previewSessions } : {}), status: typeof value.status === 'string' ? value.status : 'unknown', + ...(typeof value.chatId === 'string' && value.chatId ? 
{ chatId: value.chatId } : {}), + } +} + +function resolveChatIdFromStreamBatch(batch: StreamBatchResponse): string | undefined { + if (batch.chatId) return batch.chatId + + for (const { event } of batch.events) { + const streamChatId = typeof event.stream?.chatId === 'string' ? event.stream.chatId : undefined + if (streamChatId) return streamChatId + if ( + event.type === MothershipStreamV1EventType.session && + event.payload.kind === MothershipStreamV1SessionKind.chat + ) { + return event.payload.chatId + } } + + return undefined } function toRawPersistedContentBlock(block: ContentBlock): Record | null { @@ -890,6 +1122,28 @@ function isTerminalStreamStatus(status: string | null | undefined): boolean { return TERMINAL_STREAM_STATUSES.has(status ?? '') } +function isAlreadyProcessedStreamCursor( + eventCursor: string | undefined, + currentCursor: string +): boolean { + if (!eventCursor) return false + + const eventSequence = Number(eventCursor) + const currentSequence = Number(currentCursor) + return ( + Number.isFinite(eventSequence) && + Number.isFinite(currentSequence) && + eventSequence <= currentSequence + ) +} + +function buildRecoverySubjectKey( + chatId: string | undefined, + selectedChatId: string | undefined +): string { + return `${chatId ?? ''}:${selectedChatId ?? 
''}` +} + const sseEncoder = new TextEncoder() function buildReplayStream(events: StreamBatchEvent[]): ReadableStream { return new ReadableStream({ @@ -1026,6 +1280,18 @@ export interface UseChatOptions { initialActiveResourceId?: string | null } +interface ActiveStreamRecovery { + subjectKey: string + controller: AbortController + promise: Promise +} + +type StopGenerationMode = 'normal' | 'queued-handoff' + +interface StopGenerationOptions { + mode?: StopGenerationMode +} + export function getMothershipUseChatOptions( options: Pick = {} ): UseChatOptions { @@ -1062,6 +1328,7 @@ export function useChat( const [isReconnecting, setIsReconnecting] = useState(false) const [error, setError] = useState(null) const [resolvedChatId, setResolvedChatId] = useState(initialChatId) + const [queuedHandoffRecoveryEpoch, setQueuedHandoffRecoveryEpoch] = useState(0) const [resources, setResources] = useState([]) const [activeResourceId, setActiveResourceId] = useState( options?.initialActiveResourceId ?? null @@ -1075,6 +1342,7 @@ export function useChat( const stopPathRef = useRef(options?.stopPath ?? '/api/mothership/chat/stop') stopPathRef.current = options?.stopPath ?? 
'/api/mothership/chat/stop' const pendingStopPromiseRef = useRef | null>(null) + const pendingStopModeRef = useRef(null) const workflowIdRef = useRef(options?.workflowId) workflowIdRef.current = options?.workflowId const onToolResultRef = useRef(options?.onToolResult) @@ -1208,8 +1476,8 @@ export function useChat( [removePreviewSession, syncPreviewSessionRefs] ) - const [messageQueue, setMessageQueue] = useState([]) - const messageQueueRef = useRef([]) + const [messageQueue, setMessageQueue] = useState([]) + const messageQueueRef = useRef([]) messageQueueRef.current = messageQueue const queuedMessageDispatchIdsRef = useRef>(new Set()) const queueDispatchActionsRef = useRef([]) @@ -1225,7 +1493,12 @@ export function useChat( reader: ReadableStreamDefaultReader, assistantId: string, expectedGen?: number, - options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean } + options?: { + preserveExistingState?: boolean + suppressWorkflowToolStarts?: boolean + targetChatId?: string + shouldContinue?: () => boolean + } ) => Promise<{ sawStreamError: boolean; sawComplete: boolean }> >(async () => ({ sawStreamError: false, sawComplete: false })) const attachToExistingStreamRef = useRef< @@ -1235,13 +1508,23 @@ export function useChat( expectedGen: number initialBatch?: StreamBatchResponse | null afterCursor?: string + targetChatId?: string + shouldContinue?: () => boolean }) => Promise<{ error: boolean; aborted: boolean }> >(async () => ({ error: false, aborted: true })) const retryReconnectRef = useRef< - (opts: { streamId: string; assistantId: string; gen: number }) => Promise + (opts: { + streamId: string + assistantId: string + gen: number + targetChatId?: string + shouldContinue?: () => boolean + }) => Promise >(async () => false) - const finalizeRef = useRef<(options?: { error?: boolean }) => void>(() => {}) - const recoveringQueuedSendHandoffIdRef = useRef(null) + const finalizeRef = useRef<(options?: { error?: boolean; targetChatId?: string }) => 
void>( + () => {} + ) + const recoveringQueuedSendHandoffRef = useRef(null) const resetEphemeralPreviewState = useCallback( (options?: { removeStreamingResource?: boolean }) => { @@ -1327,6 +1610,7 @@ export function useChat( const streamRequestIdRef = useRef(undefined) const locallyTerminalStreamIdRef = useRef(undefined) const lastCursorRef = useRef('0') + const activeStreamReturnRecoveryRef = useRef(null) const sendingRef = useRef(false) const streamGenRef = useRef(0) const streamingContentRef = useRef('') @@ -1354,6 +1638,23 @@ export function useChat( setIsReconnecting(true) }, []) + const cancelActiveStreamRecovery = useCallback(() => { + const recovery = activeStreamReturnRecoveryRef.current + if (!recovery) return + recovery.controller.abort('superseded_recovery') + activeStreamReturnRecoveryRef.current = null + }, []) + + const cancelActiveStreamReader = useCallback(() => { + const reader = streamReaderRef.current + streamReaderRef.current = null + void reader?.cancel().catch((error) => { + logger.warn('Failed to cancel detached stream reader', { + error: toError(error).message, + }) + }) + }, []) + const resetStreamingBuffers = useCallback(() => { streamingContentRef.current = '' streamingBlocksRef.current = [] @@ -1371,7 +1672,9 @@ export function useChat( }, [resetStreamingBuffers]) const resetHomeChatState = useCallback(() => { + cancelActiveStreamRecovery() streamGenRef.current++ + cancelActiveStreamReader() chatIdRef.current = undefined lastCursorRef.current = '0' locallyTerminalStreamIdRef.current = undefined @@ -1387,7 +1690,36 @@ export function useChat( resetEphemeralPreviewState() setMessageQueue([]) clearQueueDispatchState() - }, [clearActiveTurn, clearQueueDispatchState, resetEphemeralPreviewState, setTransportIdle]) + }, [ + cancelActiveStreamRecovery, + cancelActiveStreamReader, + clearActiveTurn, + clearQueueDispatchState, + resetEphemeralPreviewState, + setTransportIdle, + ]) + + const adoptResolvedChatId = useCallback( + (chatId: string, 
options?: { replaceHomeHistory?: boolean; invalidateList?: boolean }) => { + const selectedChatId = selectedChatIdRef.current + chatIdRef.current = chatId + if (!selectedChatId || selectedChatId === chatId) { + setResolvedChatId(chatId) + } + if ( + options?.replaceHomeHistory && + !selectedChatId && + !workflowIdRef.current && + typeof window !== 'undefined' + ) { + window.history.replaceState(null, '', `/workspace/${workspaceId}/task/${chatId}`) + } + if (options?.invalidateList) { + queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) }) + } + }, + [queryClient, workspaceId] + ) const { data: chatHistory } = useChatHistory(resolvedChatId) const messages = useMemo(() => { @@ -1545,7 +1877,9 @@ export function useChat( const abandonedChatId = streamOwnerId // Detach the current UI from the old stream without cancelling it on the server. // Reopening that chat later will reconnect through the existing chatHistory flow. + cancelActiveStreamRecovery() streamGenRef.current++ + cancelActiveStreamReader() abortControllerRef.current = null clearActiveTurn() setTransportIdle() @@ -1557,6 +1891,8 @@ export function useChat( return } } + cancelActiveStreamRecovery() + cancelActiveStreamReader() chatIdRef.current = initialChatId lastCursorRef.current = '0' locallyTerminalStreamIdRef.current = undefined @@ -1578,6 +1914,8 @@ export function useChat( clearQueueDispatchState, clearActiveTurn, setTransportIdle, + cancelActiveStreamRecovery, + cancelActiveStreamReader, ]) useEffect(() => { @@ -1655,9 +1993,12 @@ export function useChat( if (shouldReconnectActiveStream && activeStreamId) { const gen = ++streamGenRef.current const abortController = new AbortController() + const previousStreamId = streamIdRef.current ?? activeTurnRef.current?.userMessageId + const reconnectAfterCursor = + previousStreamId === activeStreamId ? 
lastCursorRef.current || '0' : '0' abortControllerRef.current = abortController streamIdRef.current = activeStreamId - lastCursorRef.current = '0' + lastCursorRef.current = reconnectAfterCursor setTransportReconnecting() const assistantId = getLiveAssistantMessageId(activeStreamId) @@ -1668,21 +2009,34 @@ export function useChat( ? (initialSnapshot.events as StreamBatchEvent[]) : [] - const reconnectResult = - snapshotEvents.length > 0 - ? await attachToExistingStreamRef.current({ - streamId: activeStreamId, - assistantId, - expectedGen: gen, - initialBatch: { - success: true, - events: snapshotEvents, - previewSessions: snapshotPreviewSessions, - status: initialSnapshot?.status ?? 'unknown', - }, - afterCursor: String(snapshotEvents[snapshotEvents.length - 1]?.eventId ?? '0'), - }) - : null + let reconnectResult: Awaited> | null = + null + const replaySnapshotEvents = snapshotEvents.filter( + (entry) => !isAlreadyProcessedStreamCursor(String(entry.eventId), reconnectAfterCursor) + ) + if (replaySnapshotEvents.length > 0) { + try { + reconnectResult = await attachToExistingStreamRef.current({ + streamId: activeStreamId, + assistantId, + expectedGen: gen, + initialBatch: { + success: true, + events: replaySnapshotEvents, + previewSessions: snapshotPreviewSessions, + status: initialSnapshot?.status ?? 
'unknown', + }, + afterCursor: reconnectAfterCursor, + targetChatId: chatHistory.id, + }) + } catch (error) { + logger.warn('Snapshot stream reconnect failed; falling back to retry', { + chatId: chatHistory.id, + streamId: activeStreamId, + error: toError(error).message, + }) + } + } const succeeded = reconnectResult !== null @@ -1691,9 +2045,10 @@ export function useChat( streamId: activeStreamId, assistantId, gen, + targetChatId: chatHistory.id, }) if (succeeded && streamGenRef.current === gen && sendingRef.current) { - finalizeRef.current() + finalizeRef.current({ targetChatId: chatHistory.id }) return } if (succeeded && streamGenRef.current === gen) { @@ -1703,7 +2058,7 @@ export function useChat( } if (!succeeded && streamGenRef.current === gen) { try { - finalizeRef.current({ error: true }) + finalizeRef.current({ error: true, targetChatId: chatHistory.id }) } catch { setTransportIdle() abortControllerRef.current = null @@ -1728,9 +2083,21 @@ export function useChat( reader: ReadableStreamDefaultReader, assistantId: string, expectedGen?: number, - options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean } + options?: { + preserveExistingState?: boolean + suppressWorkflowToolStarts?: boolean + targetChatId?: string + shouldContinue?: () => boolean + } ) => { const decoder = new TextDecoder() + const isReaderStale = () => + (expectedGen !== undefined && streamGenRef.current !== expectedGen) || + options?.shouldContinue?.() === false + if (isReaderStale()) { + void reader.cancel().catch(() => {}) + return { sawStreamError: false, sawComplete: false } + } streamReaderRef.current = reader let buffer = '' @@ -1886,7 +2253,7 @@ export function useChat( })}` } - const isStale = () => expectedGen !== undefined && streamGenRef.current !== expectedGen + const isStale = isReaderStale let sawStreamError = false let sawCompleteEvent = false let scheduledTextFlushFrame: number | null = null @@ -1899,7 +2266,7 @@ export function useChat( 
[assistantId, streamRequestId], runningText ) - const activeChatId = chatIdRef.current + const activeChatId = options?.targetChatId ?? chatIdRef.current if (!activeChatId) { const snapshot: Partial = { content: runningText, @@ -2001,10 +2368,14 @@ export function useChat( if (parsed.stream?.streamId) { streamIdRef.current = parsed.stream.streamId } - if (parsed.stream?.cursor) { - lastCursorRef.current = parsed.stream.cursor - } else if (typeof parsed.seq === 'number') { - lastCursorRef.current = String(parsed.seq) + const eventCursor = + parsed.stream?.cursor ?? + (typeof parsed.seq === 'number' ? String(parsed.seq) : undefined) + if (isAlreadyProcessedStreamCursor(eventCursor, lastCursorRef.current)) { + continue + } + if (eventCursor) { + lastCursorRef.current = eventCursor } logger.debug('SSE event received', parsed) @@ -2729,14 +3100,19 @@ export function useChat( processSSEStreamRef.current = processSSEStream const getActiveStreamIdForChat = useCallback( - async (chatId: string): Promise => { + async (chatId: string, signal?: AbortSignal): Promise => { const cached = queryClient.getQueryData(taskKeys.detail(chatId)) if (cached?.activeStreamId) { return cached.activeStreamId } try { - const history = await fetchChatHistory(chatId) + const fetchSignal = combineAbortSignals( + signal, + createTimeoutSignal(CHAT_HISTORY_RECOVERY_TIMEOUT_MS) + ) + const history = await fetchChatHistory(chatId, fetchSignal) + if (signal?.aborted || fetchSignal?.aborted) return null queryClient.setQueryData(taskKeys.detail(chatId), history) return history.activeStreamId ?? 
null } catch (error) { @@ -2756,11 +3132,15 @@ export function useChat( afterCursor: string, signal?: AbortSignal ): Promise => { + const fetchSignal = combineAbortSignals( + signal, + createTimeoutSignal(STREAM_BATCH_FETCH_TIMEOUT_MS) + ) // boundary-raw-fetch: stream-resume batch endpoint requires dynamic per-request traceparent header propagation that the contract layer does not model, and the response is consumed alongside live SSE tail fetches const response = await fetch( `/api/mothership/chat/stream?streamId=${encodeURIComponent(streamId)}&after=${encodeURIComponent(afterCursor)}&batch=true`, { - signal, + signal: fetchSignal, ...(streamTraceparentRef.current ? { headers: { traceparent: streamTraceparentRef.current } } : {}), @@ -2769,11 +3149,69 @@ export function useChat( if (!response.ok) { throw new Error(`Stream resume batch failed: ${response.status}`) } - const batch = parseStreamBatchResponse(await response.json()) + return parseStreamBatchResponse(await response.json()) + }, + [] + ) + + const resolveChatIdForStream = useCallback( + async ( + streamId: string, + options?: { preferExistingChatId?: boolean; signal?: AbortSignal } + ): Promise => { + if (options?.preferExistingChatId !== false) { + const existingChatId = chatIdRef.current ?? 
selectedChatIdRef.current + if (existingChatId) return existingChatId + } + + const deadline = Date.now() + STREAM_CHAT_ID_RESOLVE_TIMEOUT_MS + let retryDelayMs = 250 + let lastError: unknown + + while (Date.now() < deadline) { + if (options?.signal?.aborted) throw createAbortError(options.signal) + const remainingMs = Math.max(1, deadline - Date.now()) + try { + const batch = await fetchStreamBatch( + streamId, + '0', + combineAbortSignals( + options?.signal, + createTimeoutSignal(Math.min(remainingMs, STREAM_BATCH_FETCH_TIMEOUT_MS)) + ) + ) + const chatId = resolveChatIdFromStreamBatch(batch) + if (chatId) return chatId + } catch (error) { + lastError = error + if (error instanceof Error && error.name === 'AbortError' && Date.now() >= deadline) { + break + } + } + + await sleepWithAbort( + Math.min(retryDelayMs, Math.max(1, deadline - Date.now())), + options?.signal + ) + retryDelayMs = Math.min(retryDelayMs * 2, 2000) + } + + if (lastError) { + logger.warn('Failed to resolve chat id for stream before timeout', { + streamId, + error: toError(lastError).message, + }) + } + return undefined + }, + [fetchStreamBatch] + ) + + const seedStreamBatchPreviewSessions = useCallback( + (batch: StreamBatchResponse) => { if (Array.isArray(batch.previewSessions) && batch.previewSessions.length > 0) { seedPreviewSessions(batch.previewSessions) } - return batch }, [seedPreviewSessions] ) @@ -2785,15 +3223,26 @@ export function useChat( expectedGen: number initialBatch?: StreamBatchResponse | null afterCursor?: string + targetChatId?: string + shouldContinue?: () => boolean }): Promise<{ error: boolean; aborted: boolean }> => { - const { streamId, assistantId, expectedGen, afterCursor = '0' } = opts + const { + streamId, + assistantId, + expectedGen, + afterCursor = '0', + targetChatId, + shouldContinue, + } = opts let latestCursor = afterCursor let seedEvents = opts.initialBatch?.events ?? [] let streamStatus = opts.initialBatch?.status ?? 
'unknown' let suppressSeedWorkflowStarts = seedEvents.length > 0 const isStaleReconnect = () => - streamGenRef.current !== expectedGen || abortControllerRef.current?.signal.aborted === true + streamGenRef.current !== expectedGen || + abortControllerRef.current?.signal.aborted === true || + shouldContinue?.() === false if (isStaleReconnect()) { return { error: false, aborted: true } @@ -2812,8 +3261,13 @@ export function useChat( { preserveExistingState: true, suppressWorkflowToolStarts: suppressSeedWorkflowStarts, + ...(targetChatId ? { targetChatId } : {}), + ...(shouldContinue ? { shouldContinue } : {}), } ) + if (isStaleReconnect()) { + return { error: false, aborted: true } + } latestCursor = String(seedEvents[seedEvents.length - 1]?.eventId ?? latestCursor) lastCursorRef.current = latestCursor seedEvents = [] @@ -2862,7 +3316,11 @@ export function useChat( sseRes.body.getReader(), assistantId, expectedGen, - { preserveExistingState: true } + { + preserveExistingState: true, + ...(targetChatId ? { targetChatId } : {}), + ...(shouldContinue ? 
{ shouldContinue } : {}), + } ) if (liveResult.sawStreamError) { @@ -2887,12 +3345,15 @@ export function useChat( }) const batch = await fetchStreamBatch(streamId, latestCursor, activeAbort.signal) + if (isStaleReconnect()) { + return { error: false, aborted: true } + } + seedStreamBatchPreviewSessions(batch) seedEvents = batch.events streamStatus = batch.status if (batch.events.length > 0) { latestCursor = String(batch.events[batch.events.length - 1].eventId) - lastCursorRef.current = latestCursor } if (batch.events.length === 0 && !isTerminalStreamStatus(batch.status)) { @@ -2918,7 +3379,13 @@ export function useChat( } } }, - [fetchStreamBatch, setTransportIdle, setTransportReconnecting, setTransportStreaming] + [ + fetchStreamBatch, + seedStreamBatchPreviewSessions, + setTransportIdle, + setTransportReconnecting, + setTransportStreaming, + ] ) attachToExistingStreamRef.current = attachToExistingStream @@ -2929,11 +3396,14 @@ export function useChat( gen: number afterCursor: string signal?: AbortSignal + targetChatId?: string + shouldContinue?: () => boolean }): Promise => { - const { streamId, assistantId, gen, afterCursor, signal } = opts + const { streamId, assistantId, gen, afterCursor, signal, targetChatId, shouldContinue } = opts const batch = await fetchStreamBatch(streamId, afterCursor, signal) - if (streamGenRef.current !== gen) return + if (streamGenRef.current !== gen || shouldContinue?.() === false) return + seedStreamBatchPreviewSessions(batch) if (isTerminalStreamStatus(batch.status)) { if (batch.events.length > 0) { @@ -2941,10 +3411,18 @@ export function useChat( buildReplayStream(batch.events).getReader(), assistantId, gen, - { preserveExistingState: true } + { + preserveExistingState: true, + ...(targetChatId ? { targetChatId } : {}), + ...(shouldContinue ? { shouldContinue } : {}), + } ) } - finalizeRef.current(batch.status === 'error' ? 
{ error: true } : undefined) + if (streamGenRef.current !== gen || shouldContinue?.() === false) return + finalizeRef.current({ + ...(batch.status === 'error' ? { error: true } : {}), + ...(targetChatId ? { targetChatId } : {}), + }) return } @@ -2953,27 +3431,49 @@ export function useChat( assistantId, expectedGen: gen, initialBatch: batch, + ...(targetChatId ? { targetChatId } : {}), + ...(shouldContinue ? { shouldContinue } : {}), afterCursor: batch.events.length > 0 ? String(batch.events[batch.events.length - 1].eventId) : afterCursor, }) - if (streamGenRef.current === gen && !reconnectResult.aborted) { - finalizeRef.current(reconnectResult.error ? { error: true } : undefined) - } else if (streamGenRef.current === gen && reconnectResult.aborted && !sendingRef.current) { + if ( + streamGenRef.current === gen && + !reconnectResult.aborted && + shouldContinue?.() !== false + ) { + finalizeRef.current({ + ...(reconnectResult.error ? { error: true } : {}), + ...(targetChatId ? { targetChatId } : {}), + }) + } else if ( + streamGenRef.current === gen && + reconnectResult.aborted && + !sendingRef.current && + shouldContinue?.() !== false + ) { setTransportIdle() } }, - [fetchStreamBatch, attachToExistingStream, setTransportIdle] + [fetchStreamBatch, seedStreamBatchPreviewSessions, attachToExistingStream, setTransportIdle] ) const retryReconnect = useCallback( - async (opts: { streamId: string; assistantId: string; gen: number }): Promise => { - const { streamId, assistantId, gen } = opts + async (opts: { + streamId: string + assistantId: string + gen: number + targetChatId?: string + shouldContinue?: () => boolean + }): Promise => { + const { streamId, assistantId, gen, targetChatId, shouldContinue } = opts const isStaleReconnect = () => - streamGenRef.current !== gen || abortControllerRef.current?.signal.aborted === true + streamGenRef.current !== gen || + abortControllerRef.current?.signal.aborted === true || + shouldContinue?.() === false for (let attempt = 0; 
attempt <= MAX_RECONNECT_ATTEMPTS; attempt++) { if (isStaleReconnect()) return true @@ -2993,16 +3493,14 @@ export function useChat( if (isStaleReconnect()) return true setTransportReconnecting() - await sleep(delayMs) - if (streamGenRef.current !== gen) { - if (!sendingRef.current) { - setTransportIdle() - } else { - setIsReconnecting(false) + try { + await sleepWithAbort(delayMs, abortControllerRef.current?.signal) + } catch (err) { + if (!(err instanceof Error) || err.name !== 'AbortError') { + throw err } - return true } - if (abortControllerRef.current?.signal.aborted) { + if (isStaleReconnect()) { if (!sendingRef.current) { setTransportIdle() } else { @@ -3019,6 +3517,8 @@ export function useChat( gen, afterCursor: lastCursorRef.current || '0', signal: abortControllerRef.current?.signal, + ...(targetChatId ? { targetChatId } : {}), + ...(shouldContinue ? { shouldContinue } : {}), }) if (streamGenRef.current !== gen) { if (!sendingRef.current) { @@ -3081,68 +3581,236 @@ export function useChat( ) retryReconnectRef.current = retryReconnect - const persistPartialResponse = useCallback( - async (overrides?: { - chatId?: string - streamId?: string - content?: string - blocks?: ContentBlock[] - // `stopGeneration` must snapshot these BEFORE clearActiveTurn() - // nulls the refs, or the fetch sees undefined. - requestId?: string - traceparent?: string - }) => { - const chatId = overrides?.chatId ?? chatIdRef.current - const streamId = overrides?.streamId ?? streamIdRef.current - if (!chatId || !streamId) return - - const content = overrides?.content ?? streamingContentRef.current - const requestId = overrides?.requestId ?? streamRequestIdRef.current - const traceparent = overrides?.traceparent ?? 
streamTraceparentRef.current + const recoverActiveStreamFromRedis = useCallback( + async (reason: 'pageshow' | 'visible' | 'online'): Promise => { + const startingChatId = chatIdRef.current + const startingSelectedChatId = selectedChatIdRef.current + const chatId = startingChatId ?? startingSelectedChatId + if (!chatId) return + + const subjectKey = buildRecoverySubjectKey(startingChatId, startingSelectedChatId) + const existingRecovery = activeStreamReturnRecoveryRef.current + if (existingRecovery?.subjectKey === subjectKey) { + return existingRecovery.promise + } + if (existingRecovery) { + existingRecovery.controller.abort('replaced_by_new_recovery_subject') + activeStreamReturnRecoveryRef.current = null + } - const sourceBlocks = overrides?.blocks ?? streamingBlocksRef.current - const storedBlocks = sourceBlocks.map((block) => { - const timing = { - ...(typeof block.timestamp === 'number' ? { timestamp: block.timestamp } : {}), - ...(typeof block.endedAt === 'number' ? { endedAt: block.endedAt } : {}), + const recoveryController = new AbortController() + const recovery = (async () => { + const observedGeneration = streamGenRef.current + const isSameRecoverySubject = () => + chatIdRef.current === startingChatId && + selectedChatIdRef.current === startingSelectedChatId && + !recoveryController.signal.aborted + + const cached = queryClient.getQueryData(taskKeys.detail(chatId)) + let streamId = + streamIdRef.current ?? activeTurnRef.current?.userMessageId ?? cached?.activeStreamId + if (!streamId) { + streamId = + (await getActiveStreamIdForChat(chatId, recoveryController.signal)) ?? undefined } - if (block.type === 'tool_call' && block.toolCall) { - const isCancelled = - block.toolCall.status === 'executing' || block.toolCall.status === 'cancelled' - const displayTitle = isCancelled ? 'Stopped by user' : block.toolCall.displayTitle - const display = displayTitle ? 
{ title: displayTitle } : undefined - return { - type: block.type, - content: block.content, - toolCall: { - id: block.toolCall.id, - name: block.toolCall.name, - state: isCancelled ? MothershipStreamV1ToolOutcome.cancelled : block.toolCall.status, - params: block.toolCall.params, - result: block.toolCall.result, - ...(display ? { display } : {}), - calledBy: block.toolCall.calledBy, - }, - ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}), - ...timing, - } + if ( + !isSameRecoverySubject() || + streamGenRef.current !== observedGeneration || + pendingStopPromiseRef.current !== null || + !streamId || + locallyTerminalStreamIdRef.current === streamId + ) { + return } - return { - type: block.type, - content: block.content, - ...(block.subagent ? { lane: 'subagent' } : {}), - ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}), - ...timing, + + const recoveryGen = observedGeneration + 1 + streamGenRef.current = recoveryGen + setTransportReconnecting() + streamIdRef.current = streamId + + const replacedController = abortControllerRef.current + if (replacedController && !replacedController.signal.aborted) { + replacedController.abort('superseded_recovery') } - }) - if (storedBlocks.length > 0) { - storedBlocks.push({ type: 'stopped', content: undefined }) - } + const replacedReader = streamReaderRef.current + streamReaderRef.current = null + void replacedReader?.cancel().catch((error) => { + logger.warn('Failed to cancel superseded stream reader during recovery', { + chatId, + streamId, + error: toError(error).message, + }) + }) + + abortControllerRef.current = recoveryController + + logger.info('Recovering active stream after browser return', { + reason, + chatId, + streamId, + fromGeneration: observedGeneration, + toGeneration: recoveryGen, + }) + + if ( + streamGenRef.current !== recoveryGen || + pendingStopPromiseRef.current !== null || + !isSameRecoverySubject() + ) { + return + } + if 
(locallyTerminalStreamIdRef.current === streamId) return + + const assistantId = getLiveAssistantMessageId(streamId) + const afterCursor = lastCursorRef.current || '0' + + try { + await resumeOrFinalize({ + streamId, + assistantId, + gen: recoveryGen, + afterCursor, + signal: recoveryController.signal, + targetChatId: chatId, + shouldContinue: isSameRecoverySubject, + }) + } catch (error) { + if (error instanceof Error && error.name === 'AbortError') { + return + } + logger.warn('Active stream recovery failed', { + reason, + chatId, + streamId, + error: toError(error).message, + }) + + const succeeded = await retryReconnectRef.current({ + streamId, + assistantId, + gen: recoveryGen, + targetChatId: chatId, + shouldContinue: isSameRecoverySubject, + }) + if (!succeeded && streamGenRef.current === recoveryGen && isSameRecoverySubject()) { + finalizeRef.current({ error: true, targetChatId: chatId }) + } + } + })() + + activeStreamReturnRecoveryRef.current = { + subjectKey, + controller: recoveryController, + promise: recovery, + } + try { + await recovery + } finally { + if (activeStreamReturnRecoveryRef.current?.promise === recovery) { + activeStreamReturnRecoveryRef.current = null + } + } + }, + [getActiveStreamIdForChat, queryClient, resumeOrFinalize, setTransportReconnecting] + ) + + useEffect(() => { + if (typeof window === 'undefined' || typeof document === 'undefined') return + + const recoverIfChatSelected = (reason: 'pageshow' | 'visible' | 'online') => { + if (!chatIdRef.current && !selectedChatIdRef.current) return + void recoverActiveStreamFromRedis(reason) + } + + const handleVisibilityChange = () => { + if (document.visibilityState === 'visible') { + recoverIfChatSelected('visible') + } + } + + const handlePageShow = () => { + recoverIfChatSelected('pageshow') + } + + const handleOnline = () => { + recoverIfChatSelected('online') + } + + document.addEventListener('visibilitychange', handleVisibilityChange) + window.addEventListener('pageshow', 
handlePageShow) + window.addEventListener('online', handleOnline) + + return () => { + document.removeEventListener('visibilitychange', handleVisibilityChange) + window.removeEventListener('pageshow', handlePageShow) + window.removeEventListener('online', handleOnline) + } + }, [recoverActiveStreamFromRedis]) + + const persistPartialResponse = useCallback( + async (overrides?: { + chatId?: string + streamId?: string + content?: string + blocks?: ContentBlock[] + // `stopGeneration` must snapshot these BEFORE clearActiveTurn() + // nulls the refs, or the fetch sees undefined. + requestId?: string + traceparent?: string + }) => { + const chatId = overrides?.chatId ?? chatIdRef.current + const streamId = overrides?.streamId ?? streamIdRef.current + if (!chatId || !streamId) return + + const content = overrides?.content ?? streamingContentRef.current + const requestId = overrides?.requestId ?? streamRequestIdRef.current + const traceparent = overrides?.traceparent ?? streamTraceparentRef.current + + const sourceBlocks = overrides?.blocks ?? streamingBlocksRef.current + const storedBlocks = sourceBlocks.map((block) => { + const timing = { + ...(typeof block.timestamp === 'number' ? { timestamp: block.timestamp } : {}), + ...(typeof block.endedAt === 'number' ? { endedAt: block.endedAt } : {}), + } + if (block.type === 'tool_call' && block.toolCall) { + const isCancelled = + block.toolCall.status === 'executing' || block.toolCall.status === 'cancelled' + const displayTitle = isCancelled ? 'Stopped by user' : block.toolCall.displayTitle + const display = displayTitle ? { title: displayTitle } : undefined + return { + type: block.type, + content: block.content, + toolCall: { + id: block.toolCall.id, + name: block.toolCall.name, + state: isCancelled ? MothershipStreamV1ToolOutcome.cancelled : block.toolCall.status, + params: block.toolCall.params, + result: block.toolCall.result, + ...(display ? 
{ display } : {}), + calledBy: block.toolCall.calledBy, + }, + ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}), + ...timing, + } + } + return { + type: block.type, + content: block.content, + ...(block.subagent ? { lane: 'subagent' } : {}), + ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}), + ...timing, + } + }) + + if (storedBlocks.length > 0) { + storedBlocks.push({ type: 'stopped', content: undefined }) + } try { const res = await fetch(stopPathRef.current, { method: 'POST', + signal: createTimeoutSignal(STOP_REQUEST_TIMEOUT_MS), headers: { 'Content-Type': 'application/json', ...(traceparent ? { traceparent } : {}), @@ -3176,8 +3844,8 @@ export function useChat( ) const invalidateChatQueries = useCallback( - (options?: { includeDetail?: boolean }) => { - const activeChatId = chatIdRef.current + (options?: { includeDetail?: boolean; targetChatId?: string }) => { + const activeChatId = options?.targetChatId ?? chatIdRef.current if (options?.includeDetail !== false && activeChatId) { queryClient.invalidateQueries({ queryKey: taskKeys.detail(activeChatId), @@ -3217,8 +3885,45 @@ export function useChat( [] ) + const createQueuedMessage = useCallback( + ( + message: string, + fileAttachments?: FileAttachmentForApi[], + contexts?: ChatContext[] + ): QueuedChatMessage => { + const id = generateId() + const handoffChatId = selectedChatIdRef.current ?? chatIdRef.current + const cachedActiveStreamId = handoffChatId + ? queryClient.getQueryData(taskKeys.detail(handoffChatId))?.activeStreamId + : undefined + const supersededStreamId = + streamIdRef.current || + activeTurnRef.current?.userMessageId || + locallyTerminalStreamIdRef.current || + cachedActiveStreamId || + null + + return { + id, + content: message, + fileAttachments, + contexts, + ...(supersededStreamId || handoffChatId + ? { + queuedSendHandoff: { + id, + ...(handoffChatId ? 
{ chatId: handoffChatId } : {}), + supersededStreamId, + }, + } + : {}), + } + }, + [queryClient] + ) + const finalize = useCallback( - (options?: { error?: boolean }) => { + (options?: { error?: boolean; targetChatId?: string }) => { const isError = !!options?.error const hasQueuedFollowUp = !isError && messageQueueRef.current.length > 0 reconcileTerminalPreviewSessions() @@ -3227,7 +3932,10 @@ export function useChat( clearActiveTurn() setTransportIdle() abortControllerRef.current = null - invalidateChatQueries({ includeDetail: !hasQueuedFollowUp }) + invalidateChatQueries({ + includeDetail: !hasQueuedFollowUp, + ...(options?.targetChatId ? { targetChatId: options.targetChatId } : {}), + }) notifyTurnEnded({ error: isError }) }, [ @@ -3251,21 +3959,21 @@ export function useChat( ) => { if (!message.trim() || !workspaceId) return false const pendingStop = pendingStopOverride ?? pendingStopPromiseRef.current + const pendingStopStreamId = pendingStop + ? queuedSendHandoff?.supersededStreamId || + locallyTerminalStreamIdRef.current || + streamIdRef.current || + activeTurnRef.current?.userMessageId + : undefined - const gen = ++streamGenRef.current let consumedByTranscript = false setError(null) setTransportStreaming() - locallyTerminalStreamIdRef.current = undefined const userMessageId = queuedSendHandoff?.userMessageId ?? generateId() const assistantId = getLiveAssistantMessageId(userMessageId) - streamIdRef.current = userMessageId - lastCursorRef.current = '0' - resetStreamingBuffers() - const storedAttachments: PersistedFileAttachment[] | undefined = fileAttachments && fileAttachments.length > 0 ? fileAttachments.map((f) => ({ @@ -3277,11 +3985,14 @@ export function useChat( })) : undefined - const requestChatId = selectedChatIdRef.current ?? chatIdRef.current - if (queuedSendHandoff) { + let requestChatId = + queuedSendHandoff?.chatId ?? selectedChatIdRef.current ?? 
chatIdRef.current + const writeQueuedSendHandoff = (chatId?: string) => { + if (!queuedSendHandoff) return + if (!chatId && !queuedSendHandoff.supersededStreamId) return writeQueuedSendHandoffState({ id: queuedSendHandoff.id, - chatId: queuedSendHandoff.chatId, + ...(chatId ? { chatId } : {}), workspaceId, supersededStreamId: queuedSendHandoff.supersededStreamId, userMessageId, @@ -3291,6 +4002,9 @@ export function useChat( requestedAt: Date.now(), }) } + if (queuedSendHandoff) { + writeQueuedSendHandoff(queuedSendHandoff.chatId) + } const messageContexts = contexts?.map((c) => ({ kind: c.kind, label: c.label, @@ -3331,12 +4045,6 @@ export function useChat( content: '', contentBlocks: [], } - activeTurnRef.current = { - userMessageId, - assistantMessageId: assistantId, - optimisticUserMessage, - optimisticAssistantMessage, - } if (requestChatId) { await queryClient.cancelQueries({ queryKey: taskKeys.detail(requestChatId) }) @@ -3395,27 +4103,76 @@ export function useChat( onOptimisticSendApplied?.() consumedByTranscript = true - const abortController = new AbortController() - abortControllerRef.current = abortController - + let gen: number | undefined + let streamTargetChatId: string | undefined try { if (pendingStop) { try { await pendingStop + if (!requestChatId) { + requestChatId = + queuedSendHandoff?.chatId ?? + (queuedSendHandoff ? undefined : selectedChatIdRef.current) ?? 
+ chatIdRef.current + if (!requestChatId && pendingStopStreamId) { + const resolvedChatId = await resolveChatIdForStream(pendingStopStreamId, { + preferExistingChatId: false, + }) + if (resolvedChatId) { + if (!selectedChatIdRef.current || selectedChatIdRef.current === resolvedChatId) { + adoptResolvedChatId(resolvedChatId, { replaceHomeHistory: true }) + } + requestChatId = resolvedChatId + } + } + if (requestChatId) { + writeQueuedSendHandoff(requestChatId) + } + } + if ((queuedSendHandoff || pendingStopStreamId) && !requestChatId) { + throw new Error('Cannot send queued message until the active chat is known.') + } + if ( + queuedSendHandoff && + requestChatId && + selectedChatIdRef.current && + selectedChatIdRef.current !== requestChatId + ) { + throw new Error('Queued message was restored because the selected task changed.') + } if (requestChatId) { await queryClient.cancelQueries({ queryKey: taskKeys.detail(requestChatId) }) } applyOptimisticSend() } catch (err) { + if (queuedSendHandoff) { + clearQueuedSendHandoffClaim(queuedSendHandoff.id) + } rollbackOptimisticSend() - abortControllerRef.current = null - clearActiveTurn() - setTransportIdle() + if (!streamReaderRef.current && !abortControllerRef.current) { + clearActiveTurn() + setTransportIdle() + } setError(err instanceof Error ? 
err.message : 'Failed to stop the previous response') return false } } + streamTargetChatId = requestChatId + gen = ++streamGenRef.current + locallyTerminalStreamIdRef.current = undefined + streamIdRef.current = userMessageId + lastCursorRef.current = '0' + resetStreamingBuffers() + activeTurnRef.current = { + userMessageId, + assistantMessageId: assistantId, + optimisticUserMessage, + optimisticAssistantMessage, + } + const abortController = new AbortController() + abortControllerRef.current = abortController + const currentActiveId = activeResourceIdRef.current const currentResources = resourcesRef.current const resourceAttachments = @@ -3461,15 +4218,32 @@ export function useChat( typeof errorData.activeStreamId === 'string' ? errorData.activeStreamId : userMessageId + const supersededStreamId = queuedSendHandoff?.supersededStreamId ?? pendingStopStreamId + if (supersededStreamId && conflictStreamId === supersededStreamId) { + rollbackOptimisticSend() + if (streamGenRef.current === gen) { + streamGenRef.current++ + abortController.abort('queued_handoff:superseded_conflict') + abortControllerRef.current = null + clearActiveTurn() + setTransportIdle() + } + setError('Previous response is still shutting down; queued message was restored.') + return false + } streamIdRef.current = conflictStreamId const succeeded = await retryReconnect({ streamId: conflictStreamId, assistantId, gen, + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), }) if (succeeded) return consumedByTranscript if (streamGenRef.current === gen) { - finalize({ error: true }) + finalize({ + error: true, + ...(streamTargetChatId ? 
{ targetChatId: streamTargetChatId } : {}), + }) } return consumedByTranscript } @@ -3482,17 +4256,24 @@ export function useChat( if (!response.body) throw new Error('No response body') - const streamResult = await processSSEStream(response.body.getReader(), assistantId, gen) + const streamResult = await processSSEStream(response.body.getReader(), assistantId, gen, { + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), + }) if (streamGenRef.current === gen) { if (streamResult.sawStreamError) { - finalize({ error: true }) + finalize({ + error: true, + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), + }) return consumedByTranscript } // A live SSE `complete` event is already terminal. Finalize immediately so follow-up // sends do not get spuriously queued behind an already-finished response. if (streamResult.sawComplete) { - finalize() + finalize({ + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), + }) return consumedByTranscript } @@ -3502,34 +4283,44 @@ export function useChat( gen, afterCursor: lastCursorRef.current || '0', signal: abortController.signal, + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), }) if (streamGenRef.current === gen && sendingRef.current) { - finalize() + finalize({ + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), + }) } } } catch (err) { if (err instanceof Error && err.name === 'AbortError') return consumedByTranscript if (isStreamSchemaValidationError(err)) { setError(err.message) - if (streamGenRef.current === gen) { - finalize({ error: true }) + if (gen !== undefined && streamGenRef.current === gen) { + finalize({ + error: true, + ...(streamTargetChatId ? 
{ targetChatId: streamTargetChatId } : {}), + }) } return consumedByTranscript } const activeStreamId = streamIdRef.current - if (activeStreamId && streamGenRef.current === gen) { + if (activeStreamId && gen !== undefined && streamGenRef.current === gen) { const succeeded = await retryReconnect({ streamId: activeStreamId, assistantId, gen, + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), }) if (succeeded) return consumedByTranscript } setError(err instanceof Error ? err.message : 'Failed to send message') - if (streamGenRef.current === gen) { - finalize({ error: true }) + if (gen !== undefined && streamGenRef.current === gen) { + finalize({ + error: true, + ...(streamTargetChatId ? { targetChatId: streamTargetChatId } : {}), + }) } return consumedByTranscript } @@ -3545,6 +4336,8 @@ export function useChat( retryReconnect, clearActiveTurn, resetStreamingBuffers, + resolveChatIdForStream, + adoptResolvedChatId, setTransportIdle, setTransportStreaming, ] @@ -3554,19 +4347,25 @@ export function useChat( if (!message.trim() || !workspaceId) return if (sendingRef.current) { - const queued: QueuedMessage = { - id: generateId(), - content: message, - fileAttachments, - contexts, - } - setMessageQueue((prev) => [...prev, queued]) + setMessageQueue((prev) => [ + ...prev, + createQueuedMessage(message, fileAttachments, contexts), + ]) + return + } + + if (pendingStopPromiseRef.current) { + setMessageQueue((prev) => [ + ...prev, + createQueuedMessage(message, fileAttachments, contexts), + ]) + void enqueueQueueDispatchRef.current({ type: 'send_head' }) return } await startSendMessage(message, fileAttachments, contexts) }, - [workspaceId, startSendMessage] + [workspaceId, startSendMessage, createQueuedMessage] ) useEffect(() => { if (typeof window === 'undefined') return @@ -3582,13 +4381,136 @@ export function useChat( window.removeEventListener('beforeunload', clearClaim) } }, []) + useEffect(() => { + if (!workspaceId || sendingRef.current || 
pendingStopPromiseRef.current) return + + let cancelled = false + const handoff = readQueuedSendHandoffState() + if (!handoff || handoff.workspaceId !== workspaceId) return + if (recoveringQueuedSendHandoffRef.current?.id === handoff.id) return + const claimRetryDelayMs = queuedSendHandoffClaimRetryDelay(handoff.id) + if (claimRetryDelayMs !== null) { + const retryTimer = window.setTimeout(() => { + setQueuedHandoffRecoveryEpoch((epoch) => epoch + 1) + }, claimRetryDelayMs) + return () => window.clearTimeout(retryTimer) + } + + if (handoff.chatId) { + if (selectedChatIdRef.current && selectedChatIdRef.current !== handoff.chatId) return + adoptResolvedChatId(handoff.chatId, { replaceHomeHistory: true }) + return + } + + if (!handoff.supersededStreamId) return + + const claimOwnerId = writeQueuedSendHandoffClaim(handoff.id) + recoveringQueuedSendHandoffRef.current = { id: handoff.id, ownerId: claimOwnerId } + const effectAbortController = new AbortController() + let shouldRetry = false + void (async () => { + const chatId = await resolveChatIdForStream(handoff.supersededStreamId as string, { + preferExistingChatId: false, + signal: effectAbortController.signal, + }) + if (!chatId) { + shouldRetry = true + return + } + if (cancelled) return + const currentHandoff = readQueuedSendHandoffState() + if ( + !currentHandoff || + currentHandoff.id !== handoff.id || + currentHandoff.workspaceId !== workspaceId || + currentHandoff.userMessageId !== handoff.userMessageId || + currentHandoff.supersededStreamId !== handoff.supersededStreamId || + currentHandoff.chatId || + !hasQueuedSendHandoffClaimOwner(handoff.id, claimOwnerId) + ) { + return + } + writeQueuedSendHandoffState({ + ...currentHandoff, + chatId, + requestedAt: Date.now(), + }) + setQueuedHandoffRecoveryEpoch((epoch) => epoch + 1) + if (!selectedChatIdRef.current || selectedChatIdRef.current === chatId) { + adoptResolvedChatId(chatId, { replaceHomeHistory: true, invalidateList: true }) + } + })() + .catch((error) => 
{ + if (error instanceof Error && error.name === 'AbortError') return + logger.warn('Failed to resolve queued send handoff chat id', { + handoffId: handoff.id, + streamId: handoff.supersededStreamId, + error: toError(error).message, + }) + }) + .finally(async () => { + if ( + shouldRetry && + !cancelled && + recoveringQueuedSendHandoffRef.current?.id === handoff.id && + recoveringQueuedSendHandoffRef.current.ownerId === claimOwnerId + ) { + const currentHandoff = readQueuedSendHandoffState() + if (currentHandoff?.id === handoff.id && !currentHandoff.chatId) { + const resolveAttempts = (currentHandoff.resolveAttempts ?? 0) + 1 + writeQueuedSendHandoffState({ ...currentHandoff, resolveAttempts }) + try { + await sleepWithAbort( + queuedSendHandoffResolveRetryDelay(resolveAttempts), + effectAbortController.signal + ) + } catch (error) { + if (error instanceof Error && error.name === 'AbortError') return + logger.warn('Failed to back off queued send handoff recovery', { + handoffId: handoff.id, + error: toError(error).message, + }) + return + } + if ( + !cancelled && + recoveringQueuedSendHandoffRef.current?.id === handoff.id && + recoveringQueuedSendHandoffRef.current.ownerId === claimOwnerId + ) { + recoveringQueuedSendHandoffRef.current = null + clearQueuedSendHandoffClaim(handoff.id, claimOwnerId) + setQueuedHandoffRecoveryEpoch((epoch) => epoch + 1) + } + return + } + } + if ( + recoveringQueuedSendHandoffRef.current?.id === handoff.id && + recoveringQueuedSendHandoffRef.current.ownerId === claimOwnerId + ) { + recoveringQueuedSendHandoffRef.current = null + } + clearQueuedSendHandoffClaim(handoff.id, claimOwnerId) + }) + return () => { + cancelled = true + effectAbortController.abort('cleanup:queued_handoff_recovery') + if ( + recoveringQueuedSendHandoffRef.current?.id === handoff.id && + recoveringQueuedSendHandoffRef.current.ownerId === claimOwnerId + ) { + recoveringQueuedSendHandoffRef.current = null + } + clearQueuedSendHandoffClaim(handoff.id, claimOwnerId) 
+ } + }, [workspaceId, queuedHandoffRecoveryEpoch, adoptResolvedChatId, resolveChatIdForStream]) useEffect(() => { if (!workspaceId || !chatHistory || sendingRef.current || pendingStopPromiseRef.current) return const handoff = readQueuedSendHandoffState() if (!handoff) return if (handoff.workspaceId !== workspaceId || handoff.chatId !== chatHistory.id) return - if (recoveringQueuedSendHandoffIdRef.current === handoff.id) return + if (recoveringQueuedSendHandoffRef.current?.id === handoff.id) return if (readQueuedSendHandoffClaim() === handoff.id) return if ( @@ -3610,8 +4532,8 @@ export function useChat( return } - recoveringQueuedSendHandoffIdRef.current = handoff.id - writeQueuedSendHandoffClaim(handoff.id) + const claimOwnerId = writeQueuedSendHandoffClaim(handoff.id) + recoveringQueuedSendHandoffRef.current = { id: handoff.id, ownerId: claimOwnerId } void startSendMessage( handoff.message, handoff.fileAttachments, @@ -3625,12 +4547,15 @@ export function useChat( userMessageId: handoff.userMessageId, } ).finally(() => { - if (recoveringQueuedSendHandoffIdRef.current === handoff.id) { - recoveringQueuedSendHandoffIdRef.current = null + if ( + recoveringQueuedSendHandoffRef.current?.id === handoff.id && + recoveringQueuedSendHandoffRef.current.ownerId === claimOwnerId + ) { + recoveringQueuedSendHandoffRef.current = null } - clearQueuedSendHandoffClaim(handoff.id) + clearQueuedSendHandoffClaim(handoff.id, claimOwnerId) }) - }, [workspaceId, chatHistory, startSendMessage]) + }, [workspaceId, chatHistory, queuedHandoffRecoveryEpoch, startSendMessage]) const cancelActiveWorkflowExecutions = useCallback(() => { const execState = useExecutionStore.getState() const consoleStore = useTerminalConsoleStore.getState() @@ -3677,172 +4602,358 @@ export function useChat( } }, [executionStream]) - const stopGeneration = useCallback(async () => { - if (pendingStopPromiseRef.current) { - return pendingStopPromiseRef.current - } + const stopGeneration = useCallback( + async 
(options?: StopGenerationOptions) => { + const mode = options?.mode ?? 'normal' + if (pendingStopPromiseRef.current) { + if (mode === 'queued-handoff' && pendingStopModeRef.current !== 'queued-handoff') { + throw new Error('Previous response is already stopping; queued message was restored.') + } + return pendingStopPromiseRef.current + } - const wasSending = sendingRef.current - const activeChatId = chatIdRef.current - const sid = - streamIdRef.current || - activeTurnRef.current?.userMessageId || - queryClient.getQueryData(taskKeys.detail(chatIdRef.current)) - ?.activeStreamId || - undefined - // Snapshot the active assistant message id BEFORE clearActiveTurn() - // nulls the ref. Used below to restrict markMessageStopped to the - // in-flight turn only — historical messages from the chat history - // also lack `endedAt` on their legacy blocks (pre-timing-fields), - // and without this gate we'd corrupt them with cancelled markers. - const activeAssistantMessageId = - activeTurnRef.current?.assistantMessageId ?? - (sid ? getLiveAssistantMessageId(sid) : undefined) - const stopContentSnapshot = streamingContentRef.current - const stopNow = Date.now() - const stopBlocksSnapshot = streamingBlocksRef.current.map((block) => ({ - ...block, - ...(block.options ? { options: [...block.options] } : {}), - ...(block.toolCall ? { toolCall: { ...block.toolCall } } : {}), - ...(block.endedAt === undefined ? { endedAt: stopNow } : {}), - })) - // Snapshot BEFORE clearActiveTurn() nulls the refs. Both - // persistPartialResponse and the abort/stop fetches run inside - // stopBarrier below, after several awaits — the refs are long - // gone by the time the fetches serialize their headers. 
- const stopRequestIdSnapshot = streamRequestIdRef.current - const stopTraceparentSnapshot = streamTraceparentRef.current - - locallyTerminalStreamIdRef.current = sid - streamGenRef.current++ - clearActiveTurn() - streamReaderRef.current?.cancel().catch(() => {}) - streamReaderRef.current = null - abortControllerRef.current?.abort('user_stop:client_stopGeneration') - abortControllerRef.current = null - setTransportIdle() + let resolveStopOperation!: () => void + let rejectStopOperation!: (error: unknown) => void + const stopOperation = new Promise((resolve, reject) => { + resolveStopOperation = resolve + rejectStopOperation = reject + }) + stopOperation.catch(() => {}) + pendingStopPromiseRef.current = stopOperation + pendingStopModeRef.current = mode + + const wasSending = sendingRef.current + let activeChatId = chatIdRef.current ?? selectedChatIdRef.current + const sid = + streamIdRef.current || + activeTurnRef.current?.userMessageId || + (activeChatId + ? queryClient.getQueryData(taskKeys.detail(activeChatId))?.activeStreamId + : undefined) || + undefined + + const activeAssistantMessageId = + activeTurnRef.current?.assistantMessageId ?? + (sid ? getLiveAssistantMessageId(sid) : undefined) + const initialStopRequestIdSnapshot = streamRequestIdRef.current + const initialStopTraceparentSnapshot = streamTraceparentRef.current + + try { + if (mode === 'queued-handoff' && !activeChatId && !sid) { + throw new Error('Cannot send queued message until the active chat is known.') + } + } catch (err) { + if (pendingStopPromiseRef.current === stopOperation) { + pendingStopPromiseRef.current = null + pendingStopModeRef.current = null + } + setError(err instanceof Error ? 
err.message : 'Failed to stop the previous response') + rejectStopOperation(err) + throw err + } - if (activeChatId) { - await queryClient.cancelQueries({ queryKey: taskKeys.detail(activeChatId) }) - upsertTaskChatHistory(activeChatId, (current) => ({ - ...current, - messages: current.messages.map((message) => - activeAssistantMessageId && message.id === activeAssistantMessageId - ? markMessageStopped(message) - : message - ), + const stopContentSnapshot = streamingContentRef.current + const stopNow = Date.now() + const stopBlocksSnapshot = streamingBlocksRef.current.map((block) => ({ + ...block, + ...(block.options ? { options: [...block.options] } : {}), + ...(block.toolCall ? { toolCall: { ...block.toolCall } } : {}), + ...(block.endedAt === undefined ? { endedAt: stopNow } : {}), })) - } else { - setPendingMessages((prev) => - prev.map((msg) => { - const hasExecutingTool = msg.contentBlocks?.some( - (block) => block.toolCall?.status === 'executing' - ) - const hasOpenBlock = msg.contentBlocks?.some((block) => block.endedAt === undefined) - if (!hasExecutingTool && !hasOpenBlock) { - return msg - } - const updatedBlocks = (msg.contentBlocks ?? []).map((block) => { - const stamped = block.endedAt === undefined ? { ...block, endedAt: stopNow } : block - if (stamped.toolCall?.status !== 'executing') { - return stamped - } - return { - ...stamped, - toolCall: { - ...stamped.toolCall, - status: 'cancelled' as const, - displayTitle: 'Stopped by user', - }, - } - }) - updatedBlocks.push({ type: 'stopped' as const }) - return { ...msg, contentBlocks: updatedBlocks } - }) - ) - } + const stopRequestIdSnapshot = streamRequestIdRef.current ?? initialStopRequestIdSnapshot + const stopTraceparentSnapshot = streamTraceparentRef.current ?? initialStopTraceparentSnapshot - // Cancel active run-tool executions before waiting for the server-side stream - // shutdown barrier; otherwise the abort settle can sit behind tool execution teardown. 
- cancelActiveWorkflowExecutions() + locallyTerminalStreamIdRef.current = sid + streamGenRef.current++ + clearActiveTurn() + streamReaderRef.current?.cancel().catch(() => {}) + streamReaderRef.current = null + abortControllerRef.current?.abort('user_stop:client_stopGeneration') + abortControllerRef.current = null + setTransportIdle() - const stopBarrier = (async () => { try { - if (wasSending && !chatIdRef.current) { - const start = Date.now() - while (!chatIdRef.current && Date.now() - start < 3000) { - await new Promise((r) => setTimeout(r, 50)) - } + if (activeChatId) { + await queryClient.cancelQueries({ queryKey: taskKeys.detail(activeChatId) }) + upsertTaskChatHistory(activeChatId, (current) => ({ + ...current, + messages: current.messages.map((message) => + activeAssistantMessageId && message.id === activeAssistantMessageId + ? markMessageStopped(message) + : message + ), + })) + } else { + setPendingMessages((prev) => + prev.map((msg) => { + const hasExecutingTool = msg.contentBlocks?.some( + (block) => block.toolCall?.status === 'executing' + ) + const hasOpenBlock = msg.contentBlocks?.some((block) => block.endedAt === undefined) + if (!hasExecutingTool && !hasOpenBlock) { + return msg + } + const updatedBlocks = (msg.contentBlocks ?? []).map((block) => { + const stamped = block.endedAt === undefined ? { ...block, endedAt: stopNow } : block + if (stamped.toolCall?.status !== 'executing') { + return stamped + } + return { + ...stamped, + toolCall: { + ...stamped.toolCall, + status: 'cancelled' as const, + displayTitle: 'Stopped by user', + }, + } + }) + updatedBlocks.push({ type: 'stopped' as const }) + return { ...msg, contentBlocks: updatedBlocks } + }) + ) + } + } catch (err) { + if (sid && locallyTerminalStreamIdRef.current === sid) { + locallyTerminalStreamIdRef.current = undefined + } + if (pendingStopPromiseRef.current === stopOperation) { + pendingStopPromiseRef.current = null + pendingStopModeRef.current = null } + setError(err instanceof Error ? 
err.message : 'Failed to stop the previous response') + rejectStopOperation(err) + throw err + } - const resolvedChatId = chatIdRef.current - const abortPromise = sid - ? (async () => { - // boundary-raw-fetch: stream-abort endpoint requires propagating the snapshotted traceparent header from the in-flight stream and has no contract authored yet - const res = await fetch('/api/mothership/chat/abort', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - ...(stopTraceparentSnapshot ? { traceparent: stopTraceparentSnapshot } : {}), - }, - body: JSON.stringify({ - streamId: sid, - ...(resolvedChatId ? { chatId: resolvedChatId } : {}), - }), + // Cancel active run-tool executions before waiting for the server-side stream + // shutdown barrier; otherwise the abort settle can sit behind tool execution teardown. + cancelActiveWorkflowExecutions() + + let abortSucceeded = false + const stopBarrier = (async () => { + let stopSucceeded = false + try { + let resolvedChatId = activeChatId ?? chatIdRef.current + let abortSettled = false + const postAbortRequest = async (chatId?: string): Promise => { + if (!sid) return true + // boundary-raw-fetch: stream-abort endpoint requires propagating the snapshotted traceparent header from the in-flight stream and has no contract authored yet + const res = await fetch('/api/mothership/chat/abort', { + method: 'POST', + signal: createTimeoutSignal(STOP_REQUEST_TIMEOUT_MS), + headers: { + 'Content-Type': 'application/json', + ...(stopTraceparentSnapshot ? { traceparent: stopTraceparentSnapshot } : {}), + }, + body: JSON.stringify({ + streamId: sid, + ...(chatId ? { chatId } : {}), + }), + }) + const payload: unknown = await res.json().catch(() => null) + if (isRecord(payload) && payload.aborted === true) { + abortSucceeded = true + } + if (!res.ok) { + if (isRecord(payload) && payload.settled === false) { + return false + } + throw new Error( + isRecord(payload) && typeof payload.error === 'string' + ? 
payload.error + : 'Failed to abort previous response' + ) + } + abortSucceeded = true + return isRecord(payload) && payload.settled === true + } + const abortPromise = sid + ? postAbortRequest(resolvedChatId).then((settled) => { + abortSettled = settled }) - if (!res.ok) { - const payload = await res.json().catch(() => null) + : Promise.resolve() + + let stopFailure: unknown + let abortFailure: unknown + try { + if (mode === 'queued-handoff' && !resolvedChatId && sid) { + resolvedChatId = await resolveChatIdForStream(sid, { + preferExistingChatId: false, + }) + if (!resolvedChatId) { + throw new Error('Cannot send queued message until the active chat is known.') + } + if ( + pendingStopPromiseRef.current !== stopOperation || + locallyTerminalStreamIdRef.current !== sid + ) { throw new Error( - typeof payload?.error === 'string' - ? payload.error - : 'Failed to abort previous response' + 'Previous response stop was superseded; queued message was restored.' ) } - })() - : Promise.resolve() - - if (wasSending && resolvedChatId) { - await persistPartialResponse({ - chatId: resolvedChatId, - streamId: sid, - content: stopContentSnapshot, - blocks: stopBlocksSnapshot, - requestId: stopRequestIdSnapshot, - traceparent: stopTraceparentSnapshot, + activeChatId = resolvedChatId + if (!selectedChatIdRef.current || selectedChatIdRef.current === resolvedChatId) { + adoptResolvedChatId(resolvedChatId, { replaceHomeHistory: true }) + } + } + + if (wasSending && resolvedChatId) { + await persistPartialResponse({ + chatId: resolvedChatId, + streamId: sid, + content: stopContentSnapshot, + blocks: stopBlocksSnapshot, + requestId: stopRequestIdSnapshot, + traceparent: stopTraceparentSnapshot, + }) + } + } catch (err) { + stopFailure = err + } + + try { + await abortPromise + } catch (err) { + abortFailure = err + } + if (sid && resolvedChatId && !abortSettled) { + try { + const retrySettled = await postAbortRequest(resolvedChatId) + abortSettled = retrySettled + abortFailure = 
retrySettled + ? undefined + : new Error('Previous response is still shutting down.') + } catch (err) { + abortFailure = err + } + } + + if (stopFailure || abortFailure) throw stopFailure ?? abortFailure + if (wasSending && resolvedChatId) { + activeChatId = resolvedChatId + } + stopSucceeded = true + } finally { + invalidateChatQueries({ + includeDetail: mode !== 'queued-handoff' || !stopSucceeded, }) + resetEphemeralPreviewState({ removeStreamingResource: true }) } + })() - await abortPromise + try { + await stopBarrier + notifyTurnEnded({ + error: false, + skipQueueDispatch: mode === 'queued-handoff', + }) + resolveStopOperation() + } catch (err) { + if (sid && !abortSucceeded && locallyTerminalStreamIdRef.current === sid) { + locallyTerminalStreamIdRef.current = undefined + } + if (activeChatId) { + invalidateChatQueries() + } + setError(err instanceof Error ? err.message : 'Failed to stop the previous response') + rejectStopOperation(err) + throw err } finally { - invalidateChatQueries() - resetEphemeralPreviewState({ removeStreamingResource: true }) + if (pendingStopPromiseRef.current === stopOperation) { + pendingStopPromiseRef.current = null + pendingStopModeRef.current = null + } } - })() + }, + [ + cancelActiveWorkflowExecutions, + invalidateChatQueries, + notifyTurnEnded, + persistPartialResponse, + queryClient, + resolveChatIdForStream, + resetEphemeralPreviewState, + upsertTaskChatHistory, + adoptResolvedChatId, + clearActiveTurn, + setTransportIdle, + workspaceId, + ] + ) - pendingStopPromiseRef.current = stopBarrier - try { - await stopBarrier - // Dispatch queued follow-ups after Stop resolves. - notifyTurnEnded({ error: false }) - } catch (err) { - setError(err instanceof Error ? 
err.message : 'Failed to stop the previous response') - throw err - } finally { - if (pendingStopPromiseRef.current === stopBarrier) { - pendingStopPromiseRef.current = null + const dispatchQueuedMessage = useCallback( + async ( + msg: QueuedChatMessage, + options: { + epoch: number + pendingStop?: Promise | null + queuedSendHandoff?: QueuedSendHandoffSeed } - } - }, [ - cancelActiveWorkflowExecutions, - invalidateChatQueries, - notifyTurnEnded, - persistPartialResponse, - queryClient, - resetEphemeralPreviewState, - upsertTaskChatHistory, - clearActiveTurn, - setTransportIdle, - ]) + ) => { + if (queuedMessageDispatchIdsRef.current.has(msg.id)) { + return + } + queuedMessageDispatchIdsRef.current.add(msg.id) + + let originalIndex = messageQueueRef.current.findIndex((queued) => queued.id === msg.id) + if (originalIndex === -1) { + queuedMessageDispatchIdsRef.current.delete(msg.id) + return + } + + let removedFromQueue = false + const removeQueuedMessage = () => { + if (removedFromQueue || options.epoch !== queueDispatchEpochRef.current) { + return + } + removedFromQueue = true + setMessageQueue((prev) => prev.filter((queued) => queued.id !== msg.id)) + } + + const restoreQueuedMessage = (handoff?: QueuedSendHandoffSeed) => { + if (!handoff) { + clearQueuedSendHandoffState(msg.id) + } + clearQueuedSendHandoffClaim(msg.id) + if (!removedFromQueue || options.epoch !== queueDispatchEpochRef.current) { + return + } + setMessageQueue((prev) => { + if (prev.some((queued) => queued.id === msg.id)) return prev + const next = [...prev] + next.splice(Math.min(originalIndex, next.length), 0, msg) + return next + }) + } + + const activeQueuedSendHandoff = options.queuedSendHandoff ?? 
msg.queuedSendHandoff + try { + const currentIndex = messageQueueRef.current.findIndex((queued) => queued.id === msg.id) + if (currentIndex === -1) { + return + } + originalIndex = currentIndex + + const consumed = await startSendMessage( + msg.content, + msg.fileAttachments, + msg.contexts, + options.pendingStop, + removeQueuedMessage, + activeQueuedSendHandoff + ) + + if (!consumed) { + restoreQueuedMessage(activeQueuedSendHandoff) + } + } catch { + restoreQueuedMessage(activeQueuedSendHandoff) + } finally { + queuedMessageDispatchIdsRef.current.delete(msg.id) + } + }, + [startSendMessage] + ) const runQueueDispatchLoop = useCallback(async () => { if (queueDispatchTaskRef.current) { @@ -3861,47 +4972,7 @@ export function useChat( const msg = messageQueueRef.current[0] if (!msg) continue - let originalIndex = 0 - let removedFromQueue = false - const removeQueuedMessage = () => { - if (removedFromQueue || action.epoch !== queueDispatchEpochRef.current) { - return - } - removedFromQueue = true - setMessageQueue((prev) => prev.filter((queued) => queued.id !== msg.id)) - } - - try { - const currentIndex = messageQueueRef.current.findIndex((queued) => queued.id === msg.id) - if (currentIndex !== -1) { - originalIndex = currentIndex - } - - const consumed = await startSendMessage( - msg.content, - msg.fileAttachments, - msg.contexts, - undefined, - removeQueuedMessage - ) - if (!consumed && removedFromQueue && action.epoch === queueDispatchEpochRef.current) { - setMessageQueue((prev) => { - if (prev.some((queued) => queued.id === msg.id)) return prev - const next = [...prev] - next.splice(Math.min(originalIndex, next.length), 0, msg) - return next - }) - } - } catch { - if (removedFromQueue && action.epoch === queueDispatchEpochRef.current) { - setMessageQueue((prev) => { - if (prev.some((queued) => queued.id === msg.id)) return prev - const next = [...prev] - next.splice(Math.min(originalIndex, next.length), 0, msg) - return next - }) - } - } + await 
dispatchQueuedMessage(msg, { epoch: action.epoch }) } })() @@ -3915,7 +4986,7 @@ export function useChat( void queueDispatchLoopRef.current() } }) - }, [startSendMessage]) + }, [dispatchQueuedMessage]) queueDispatchLoopRef.current = runQueueDispatchLoop const enqueueQueueDispatch = useCallback((action: QueueDispatchActionInput) => { @@ -3933,84 +5004,49 @@ export function useChat( const sendQueuedMessageImmediately = useCallback( async (id: string) => { - const epoch = queueDispatchEpochRef.current - const initialIndex = messageQueueRef.current.findIndex((m) => m.id === id) - if (initialIndex === -1) return - const msg = messageQueueRef.current[initialIndex] - - if (queuedMessageDispatchIdsRef.current.has(msg.id)) { - return - } - queuedMessageDispatchIdsRef.current.add(msg.id) + const msg = messageQueueRef.current.find((queued) => queued.id === id) + if (!msg) return + if (queuedMessageDispatchIdsRef.current.has(msg.id)) return // Explicit queue sends should supersede any older auto-drain work scheduled by finalize(). 
queueDispatchActionsRef.current = queueDispatchActionsRef.current.filter( (queuedAction) => queuedAction.type !== 'send_head' ) - let originalIndex = initialIndex - let removedFromQueue = false - const removeQueuedMessage = () => { - if (removedFromQueue || epoch !== queueDispatchEpochRef.current) { - return - } - removedFromQueue = true - setMessageQueue((prev) => prev.filter((queued) => queued.id !== msg.id)) - } - const restoreQueuedMessage = () => { - if (!removedFromQueue || epoch !== queueDispatchEpochRef.current) { - return - } - setMessageQueue((prev) => { - if (prev.some((queued) => queued.id === msg.id)) return prev - const next = [...prev] - next.splice(Math.min(originalIndex, next.length), 0, msg) - return next - }) - } - - try { - const currentIndex = messageQueueRef.current.findIndex((queued) => queued.id === msg.id) - if (currentIndex === -1) { - return - } - - originalIndex = currentIndex - - const queuedSendHandoff = - sendingRef.current && workspaceId - ? { + const queuedSendHandoff = + msg.queuedSendHandoff ?? + ((sendingRef.current || pendingStopPromiseRef.current) && workspaceId + ? (() => { + const handoffChatId = selectedChatIdRef.current ?? chatIdRef.current + const cachedActiveStreamId = handoffChatId + ? queryClient.getQueryData(taskKeys.detail(handoffChatId)) + ?.activeStreamId + : undefined + return { id: msg.id, - chatId: selectedChatIdRef.current ?? chatIdRef.current ?? '', + ...(handoffChatId ? { chatId: handoffChatId } : {}), supersededStreamId: streamIdRef.current || activeTurnRef.current?.userMessageId || - queryClient.getQueryData( - taskKeys.detail(selectedChatIdRef.current ?? chatIdRef.current) - )?.activeStreamId || + cachedActiveStreamId || null, } - : undefined - const pendingStop = sendingRef.current ? stopGeneration() : pendingStopPromiseRef.current - const consumed = await startSendMessage( - msg.content, - msg.fileAttachments, - msg.contexts, - pendingStop, - removeQueuedMessage, - queuedSendHandoff?.chatId ? 
queuedSendHandoff : undefined - ) + })() + : undefined) - if (!consumed) { - restoreQueuedMessage() - } - } catch { - restoreQueuedMessage() - } finally { - queuedMessageDispatchIdsRef.current.delete(msg.id) - } + const pendingStop = sendingRef.current + ? stopGeneration({ + mode: 'queued-handoff', + }) + : pendingStopPromiseRef.current + + await dispatchQueuedMessage(msg, { + epoch: queueDispatchEpochRef.current, + pendingStop, + queuedSendHandoff, + }) }, - [startSendMessage, stopGeneration] + [dispatchQueuedMessage, queryClient, stopGeneration, workspaceId] ) const sendNow = useCallback( @@ -4031,14 +5067,21 @@ export function useChat( useEffect(() => { return () => { + cancelActiveStreamRecovery() clearQueueDispatchState() - streamReaderRef.current = null - abortControllerRef.current = null streamGenRef.current++ + cancelActiveStreamReader() + abortControllerRef.current?.abort('unmount:client_cleanup') + abortControllerRef.current = null clearActiveTurn() sendingRef.current = false } - }, [clearQueueDispatchState, clearActiveTurn]) + }, [ + cancelActiveStreamRecovery, + cancelActiveStreamReader, + clearQueueDispatchState, + clearActiveTurn, + ]) return { messages, diff --git a/apps/sim/hooks/use-task-events.test.ts b/apps/sim/hooks/use-task-events.test.ts index 2e68175b935..e81edbca6dd 100644 --- a/apps/sim/hooks/use-task-events.test.ts +++ b/apps/sim/hooks/use-task-events.test.ts @@ -9,14 +9,46 @@ import { handleTaskStatusEvent } from '@/hooks/use-task-events' describe('handleTaskStatusEvent', () => { const queryClient = { + getQueryData: vi.fn(), invalidateQueries: vi.fn().mockResolvedValue(undefined), - } satisfies Pick + removeQueries: vi.fn(), + } satisfies Pick beforeEach(() => { vi.clearAllMocks() + queryClient.getQueryData.mockReturnValue(undefined) }) - it('invalidates only the task list for completed task events', () => { + it('invalidates the task list and detail for completed task events', () => { + handleTaskStatusEvent( + queryClient, + 'ws-1', + 
JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps completed task detail when an unkeyed completion races an active stream', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'new-stream' }, { id: 'live-assistant:new-stream' }], + activeStreamId: 'new-stream', + resources: [], + }) + handleTaskStatusEvent( queryClient, 'ws-1', @@ -31,15 +63,333 @@ describe('handleTaskStatusEvent', () => { expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ queryKey: taskKeys.list('ws-1'), }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() }) - it('keeps list invalidation only for non-completed task events', () => { + it('keeps completed task detail when a newer optimistic stream is active', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'old-stream' }, { id: 'new-stream' }], + activeStreamId: 'new-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + streamId: 'old-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps completed task detail when only a newer optimistic stream is cached', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'new-stream' }, { id: 'live-assistant:new-stream' }], + 
activeStreamId: 'new-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + streamId: 'old-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates completed task detail when the active stream disagreement is only stale cache', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'new-stream' }, { id: 'old-stream' }], + activeStreamId: 'new-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + streamId: 'old-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates completed task detail when a missing stream may be newer server state', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'old-stream' }], + activeStreamId: 'old-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + streamId: 'new-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + 
expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates completed task detail when the completed stream is active', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [], + activeStreamId: 'stream-1', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'completed', + streamId: 'stream-1', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates the task list and detail for metadata-changing task events', () => { + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'renamed', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates the task list and removes detail cache for deleted task events', () => { + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'deleted', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).toHaveBeenCalledTimes(1) + expect(queryClient.removeQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + }) + + it('invalidates the task list and detail for 
started task events', () => { + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'started', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps started task detail when an unkeyed started event races an active stream', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'new-stream' }, { id: 'live-assistant:new-stream' }], + activeStreamId: 'new-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'started', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps started task detail when the started stream is already active', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'stream-1' }], + activeStreamId: 'stream-1', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'started', + streamId: 'stream-1', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps started task detail when a stale started stream is older than the active stream', () => { + queryClient.getQueryData.mockReturnValue({ + id: 
'chat-1', + title: null, + messages: [{ id: 'old-stream' }, { id: 'new-stream' }], + activeStreamId: 'new-stream', + resources: [], + }) + + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'started', + streamId: 'old-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('invalidates started task detail when a missing stream may be newer server state', () => { + queryClient.getQueryData.mockReturnValue({ + id: 'chat-1', + title: null, + messages: [{ id: 'old-stream' }], + activeStreamId: 'old-stream', + resources: [], + }) + handleTaskStatusEvent( queryClient, 'ws-1', JSON.stringify({ chatId: 'chat-1', type: 'started', + streamId: 'new-stream', + timestamp: Date.now(), + }) + ) + + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(2) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.list('ws-1'), + }) + expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ + queryKey: taskKeys.detail('chat-1'), + }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() + }) + + it('keeps list invalidation only for unknown task event types', () => { + handleTaskStatusEvent( + queryClient, + 'ws-1', + JSON.stringify({ + chatId: 'chat-1', + type: 'archived', timestamp: Date.now(), }) ) @@ -48,11 +398,13 @@ describe('handleTaskStatusEvent', () => { expect(queryClient.invalidateQueries).toHaveBeenCalledWith({ queryKey: taskKeys.list('ws-1'), }) + expect(queryClient.removeQueries).not.toHaveBeenCalled() }) it('does not invalidate when task event payload is invalid', () => { handleTaskStatusEvent(queryClient, 'ws-1', '{') expect(queryClient.invalidateQueries).not.toHaveBeenCalled() + expect(queryClient.removeQueries).not.toHaveBeenCalled() }) }) diff --git 
a/apps/sim/hooks/use-task-events.ts b/apps/sim/hooks/use-task-events.ts index 0d6d4f7d0e9..b9a5216dad4 100644 --- a/apps/sim/hooks/use-task-events.ts +++ b/apps/sim/hooks/use-task-events.ts @@ -2,13 +2,68 @@ import { useEffect } from 'react' import { createLogger } from '@sim/logger' import type { QueryClient } from '@tanstack/react-query' import { useQueryClient } from '@tanstack/react-query' -import { taskKeys } from '@/hooks/queries/tasks' +import { getLiveAssistantMessageId } from '@/lib/copilot/chat/effective-transcript' +import { type TaskChatHistory, taskKeys } from '@/hooks/queries/tasks' const logger = createLogger('TaskEvents') +const TASK_STATUS_TYPES = ['started', 'completed', 'created', 'deleted', 'renamed'] as const +type TaskStatusEventType = (typeof TASK_STATUS_TYPES)[number] +const TASK_STATUS_TYPE_SET = new Set(TASK_STATUS_TYPES) + interface TaskStatusEventPayload { chatId?: string - type?: 'started' | 'completed' | 'created' | 'deleted' | 'renamed' + type?: TaskStatusEventType + streamId?: string +} + +const DETAIL_INVALIDATING_TASK_STATUS_TYPES = new Set([ + 'started', + 'completed', + 'renamed', +]) + +function isTaskStatusEventType(value: unknown): value is TaskStatusEventType { + return typeof value === 'string' && TASK_STATUS_TYPE_SET.has(value) +} + +function isLocalOptimisticActiveStream(current: TaskChatHistory | undefined) { + if (!current?.activeStreamId) return false + const liveAssistantId = getLiveAssistantMessageId(current.activeStreamId) + return current.messages.some((message) => message.id === liveAssistantId) +} + +/** + * Returns true when the cached active stream is known to be later in the + * chronological transcript than the stream that emitted this status event. + * If either stream is absent from the transcript, callers should refetch + * instead of inferring order from incomplete cache state. 
+ */ +function hasNewerKnownActiveStream(current: TaskChatHistory | undefined, streamId: string) { + if (!current?.activeStreamId || current.activeStreamId === streamId) return false + + const activeIndex = current.messages.findIndex((message) => message.id === current.activeStreamId) + const eventStreamIndex = current.messages.findIndex((message) => message.id === streamId) + if (activeIndex === -1) return false + if (eventStreamIndex === -1) return false + return activeIndex > eventStreamIndex +} + +function shouldSkipDetailInvalidationForStreamEvent( + current: TaskChatHistory | undefined, + payload: TaskStatusEventPayload +) { + if (payload.type !== 'started' && payload.type !== 'completed') return false + if (!current?.activeStreamId) return false + if (!payload.streamId) return isLocalOptimisticActiveStream(current) + if (payload.type === 'started' && current.activeStreamId === payload.streamId) return true + if (current.activeStreamId === payload.streamId) return false + if (hasNewerKnownActiveStream(current, payload.streamId)) return true + return ( + payload.type === 'completed' && + isLocalOptimisticActiveStream(current) && + !current.messages.some((message) => message.id === payload.streamId) + ) } function parseTaskStatusEventPayload(data: unknown): TaskStatusEventPayload | null { @@ -30,14 +85,13 @@ function parseTaskStatusEventPayload(data: unknown): TaskStatusEventPayload | nu return { ...(typeof record.chatId === 'string' ? { chatId: record.chatId } : {}), - ...(typeof record.type === 'string' - ? { type: record.type as TaskStatusEventPayload['type'] } - : {}), + ...(isTaskStatusEventType(record.type) ? { type: record.type } : {}), + ...(typeof record.streamId === 'string' ? 
{ streamId: record.streamId } : {}), } } export function handleTaskStatusEvent( - queryClient: Pick, + queryClient: Pick, workspaceId: string, data: unknown ): void { @@ -48,6 +102,20 @@ export function handleTaskStatusEvent( } queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) }) + if (!payload.chatId) return + if (payload.type === 'deleted') { + queryClient.removeQueries({ queryKey: taskKeys.detail(payload.chatId) }) + return + } + if (payload.type === 'started' || payload.type === 'completed') { + const current = queryClient.getQueryData(taskKeys.detail(payload.chatId)) + if (shouldSkipDetailInvalidationForStreamEvent(current, payload)) { + return + } + } + if (payload.type && DETAIL_INVALIDATING_TASK_STATUS_TYPES.has(payload.type)) { + queryClient.invalidateQueries({ queryKey: taskKeys.detail(payload.chatId) }) + } } /** diff --git a/apps/sim/lib/copilot/chat/post.ts b/apps/sim/lib/copilot/chat/post.ts index 21d94e56bb5..a745f209c9e 100644 --- a/apps/sim/lib/copilot/chat/post.ts +++ b/apps/sim/lib/copilot/chat/post.ts @@ -329,6 +329,7 @@ async function persistUserMessage(params: { workspaceId, chatId, type: 'started', + streamId: userMessageId, }) } @@ -430,6 +431,7 @@ function buildOnComplete(params: { workspaceId, chatId, type: 'completed', + streamId: userMessageId, }) } } catch (error) { @@ -461,6 +463,7 @@ function buildOnError(params: { workspaceId, chatId, type: 'completed', + streamId: userMessageId, }) } } catch (error) { diff --git a/apps/sim/lib/copilot/tasks.ts b/apps/sim/lib/copilot/tasks.ts index 5828a711cb4..db6594ebf28 100644 --- a/apps/sim/lib/copilot/tasks.ts +++ b/apps/sim/lib/copilot/tasks.ts @@ -13,6 +13,7 @@ interface TaskStatusEvent { workspaceId: string chatId: string type: 'started' | 'completed' | 'created' | 'deleted' | 'renamed' + streamId?: string } const channel = diff --git a/apps/sim/lib/mothership/inbox/executor.ts b/apps/sim/lib/mothership/inbox/executor.ts index b738a6a37ae..52236d9b959 100644 --- 
a/apps/sim/lib/mothership/inbox/executor.ts +++ b/apps/sim/lib/mothership/inbox/executor.ts @@ -131,11 +131,14 @@ export async function executeInboxTask(taskId: string): Promise<void> { }) } + const userMessageId = generateId() + if (chatId) { taskPubSub?.publishStatusChanged({ workspaceId: ws.id, chatId, type: 'started', + streamId: userMessageId, }) } @@ -178,7 +181,6 @@ export async function executeInboxTask(taskId: string): Promise<void> { } const messageContent = formatEmailAsMessage(truncatedTask, attachments) - const userMessageId = generateId() const requestPayload: Record<string, unknown> = { message: messageContent, userId, @@ -244,6 +246,7 @@ export async function executeInboxTask(taskId: string): Promise<void> { workspaceId: ws.id, chatId, type: 'completed', + streamId: userMessageId, }) } From 1166d8274ed1658b416cf5c5abf1ab6cd8e44e96 Mon Sep 17 00:00:00 2001 From: Theodore Li Date: Mon, 4 May 2026 19:42:06 -0700 Subject: [PATCH 13/15] feat(logs): add Logs block for querying execution logs from workflows (#4442) * feat(logs): add Logs block for querying execution logs from workflows * fix(logs): guard transformResponse on non-2xx and correct executionMetadata description - Add response.ok check in all three logs tools' transformResponse so a 4xx/5xx body cannot be silently treated as a success payload (defense in depth; the executor already throws on non-2xx before transform runs). - Drop totalTokens from executionMetadata description in block and tool outputs since the snapshot route does not emit it. 
--- apps/sim/app/api/logs/[id]/route.ts | 13 +- apps/sim/app/api/logs/route.ts | 13 +- apps/sim/blocks/blocks/logs.ts | 253 +++++++++++++++++++++++++++ apps/sim/blocks/registry.ts | 2 + apps/sim/tools/logs/get_execution.ts | 53 ++++++ apps/sim/tools/logs/get_log.ts | 50 ++++++ apps/sim/tools/logs/index.ts | 3 + apps/sim/tools/logs/query.ts | 132 ++++++++++++++ apps/sim/tools/logs/types.ts | 48 +++++ apps/sim/tools/registry.ts | 4 + 10 files changed, 561 insertions(+), 10 deletions(-) create mode 100644 apps/sim/blocks/blocks/logs.ts create mode 100644 apps/sim/tools/logs/get_execution.ts create mode 100644 apps/sim/tools/logs/get_log.ts create mode 100644 apps/sim/tools/logs/index.ts create mode 100644 apps/sim/tools/logs/query.ts create mode 100644 apps/sim/tools/logs/types.ts diff --git a/apps/sim/app/api/logs/[id]/route.ts b/apps/sim/app/api/logs/[id]/route.ts index 75f7378db20..5c0acd33e08 100644 --- a/apps/sim/app/api/logs/[id]/route.ts +++ b/apps/sim/app/api/logs/[id]/route.ts @@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { getLogDetailContract } from '@/lib/api/contracts/logs' import { parseRequest } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { fetchLogDetail } from '@/lib/logs/fetch-log-detail' @@ -10,9 +10,12 @@ const logger = createLogger('LogDetailsByIdAPI') export const GET = withRouteHandler( async (request: NextRequest, context: { params: Promise<{ id: string }> }) => { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json( + { error: authResult.error || 
'Authentication required' }, + { status: 401 } + ) } const parsed = await parseRequest(getLogDetailContract, request, context) @@ -22,7 +25,7 @@ export const GET = withRouteHandler( const { workspaceId } = parsed.data.query const data = await fetchLogDetail({ - userId: session.user.id, + userId: authResult.userId, workspaceId, lookupColumn: 'id', lookupValue: id, diff --git a/apps/sim/app/api/logs/route.ts b/apps/sim/app/api/logs/route.ts index 73dcd600a24..cb3690441d2 100644 --- a/apps/sim/app/api/logs/route.ts +++ b/apps/sim/app/api/logs/route.ts @@ -29,7 +29,7 @@ import type { NextRequest } from 'next/server' import { NextResponse } from 'next/server' import { listLogsContract, type WorkflowLogSummary } from '@/lib/api/contracts/logs' import { parseRequest } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { buildFilterConditions } from '@/lib/logs/filters' @@ -58,11 +58,14 @@ function decodeCursor(cursor: string): CursorData | null { } export const GET = withRouteHandler(async (request: NextRequest) => { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json( + { error: authResult.error || 'Authentication required' }, + { status: 401 } + ) } - const userId = session.user.id + const userId = authResult.userId const parsed = await parseRequest(listLogsContract, request, {}) if (!parsed.success) return parsed.response diff --git a/apps/sim/blocks/blocks/logs.ts b/apps/sim/blocks/blocks/logs.ts new file mode 100644 index 00000000000..d7665089f98 --- /dev/null +++ b/apps/sim/blocks/blocks/logs.ts @@ -0,0 +1,253 @@ +import { Library } from '@/components/emcn/icons' 
+import type { BlockConfig } from '@/blocks/types' + +export const LogsBlock: BlockConfig = { + type: 'logs', + name: 'Logs', + description: 'Query workflow execution logs', + longDescription: + 'Search workflow execution logs in the current workspace, fetch a single log by id, or load full execution details with the per-block state snapshot.', + bgColor: '#EAB308', + bestPractices: ` + - The block always operates on the current workspace; you cannot query other workspaces. + - 'Query Logs' returns summary rows. To get a full log entry (executionData, files), use 'Get Log by ID' on a row's id. + - Use 'Get Execution Details' (with an executionId) to inspect per-block state for a single run. + - Pagination is cursor-based: pass the previous response's nextCursor as Cursor to fetch the next page. + `, + icon: Library, + category: 'blocks', + docsLink: 'https://docs.sim.ai/api-reference/logs/getExecutionDetails', + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'Query Logs', id: 'query' }, + { label: 'Get Log by ID', id: 'get_log' }, + { label: 'Get Execution Details', id: 'get_execution' }, + ], + placeholder: 'Select operation', + value: () => 'query', + }, + { + id: 'workflowIds', + title: 'Workflow IDs', + type: 'short-input', + placeholder: 'Comma-separated workflow IDs', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'executionId', + title: 'Execution ID', + type: 'short-input', + placeholder: 'Filter by a single execution ID', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'level', + title: 'Level', + type: 'dropdown', + options: [ + { label: 'All', id: 'all' }, + { label: 'Info', id: 'info' }, + { label: 'Error', id: 'error' }, + { label: 'Running', id: 'running' }, + { label: 'Pending', id: 'pending' }, + ], + value: () => 'all', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'triggers', + title: 'Triggers', + type: 'short-input', + 
placeholder: 'api,webhook,schedule,manual,chat,mothership', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'limit', + title: 'Limit', + type: 'short-input', + placeholder: '100 (max 200)', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'startDate', + title: 'Start Date', + type: 'short-input', + placeholder: 'ISO 8601 timestamp', + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp from the user description. Return ONLY the timestamp string.', + generationType: 'timestamp', + }, + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'endDate', + title: 'End Date', + type: 'short-input', + placeholder: 'ISO 8601 timestamp', + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp from the user description. Return ONLY the timestamp string.', + generationType: 'timestamp', + }, + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'search', + title: 'Search', + type: 'short-input', + placeholder: 'Free-text search', + mode: 'advanced', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'sortBy', + title: 'Sort By', + type: 'dropdown', + options: [ + { label: 'Date', id: 'date' }, + { label: 'Duration', id: 'duration' }, + { label: 'Cost', id: 'cost' }, + { label: 'Status', id: 'status' }, + ], + value: () => 'date', + mode: 'advanced', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'sortOrder', + title: 'Sort Order', + type: 'dropdown', + options: [ + { label: 'Descending', id: 'desc' }, + { label: 'Ascending', id: 'asc' }, + ], + value: () => 'desc', + mode: 'advanced', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'cursor', + title: 'Cursor', + type: 'short-input', + placeholder: 'nextCursor from a previous response', + mode: 'advanced', + condition: { field: 'operation', value: 'query' }, + }, + { + id: 'logId', + title: 'Log ID', + type: 'short-input', + 
placeholder: 'Log entry ID', + condition: { field: 'operation', value: 'get_log' }, + required: true, + }, + { + id: 'executionIdLookup', + title: 'Execution ID', + type: 'short-input', + placeholder: 'Execution ID', + condition: { field: 'operation', value: 'get_execution' }, + required: true, + }, + ], + tools: { + access: ['logs_query', 'logs_get', 'logs_get_execution'], + config: { + tool: (params: Record) => { + const operation = params.operation || 'query' + if (operation === 'get_log') return 'logs_get' + if (operation === 'get_execution') return 'logs_get_execution' + return 'logs_query' + }, + params: (params: Record) => { + const operation = params.operation || 'query' + + if (operation === 'get_log') { + if (!params.logId) { + throw new Error('Logs Block Error: Log ID is required for get_log operation') + } + return { id: params.logId } + } + + if (operation === 'get_execution') { + if (!params.executionIdLookup) { + throw new Error( + 'Logs Block Error: Execution ID is required for get_execution operation' + ) + } + return { executionId: params.executionIdLookup } + } + + const rawLimit = + params.limit !== undefined && params.limit !== null && params.limit !== '' + ? Number(params.limit) + : undefined + const limit = Number.isFinite(rawLimit) ? rawLimit : undefined + + return { + workflowIds: params.workflowIds || undefined, + executionId: params.executionId || undefined, + level: params.level && params.level !== 'all' ? 
params.level : undefined, + triggers: params.triggers || undefined, + limit, + startDate: params.startDate || undefined, + endDate: params.endDate || undefined, + search: params.search || undefined, + cursor: params.cursor || undefined, + sortBy: params.sortBy || undefined, + sortOrder: params.sortOrder || undefined, + } + }, + }, + }, + inputs: { + operation: { type: 'string', description: 'Operation to perform' }, + workflowIds: { type: 'string', description: 'Comma-separated workflow IDs' }, + executionId: { type: 'string', description: 'Execution ID filter (query operation)' }, + level: { type: 'string', description: 'Log level filter' }, + triggers: { type: 'string', description: 'Comma-separated triggers' }, + limit: { type: 'number', description: 'Max logs to return (default 100, max 200)' }, + startDate: { type: 'string', description: 'ISO 8601 lower bound' }, + endDate: { type: 'string', description: 'ISO 8601 upper bound' }, + search: { type: 'string', description: 'Free-text search term' }, + sortBy: { type: 'string', description: "'date' | 'duration' | 'cost' | 'status'" }, + sortOrder: { type: 'string', description: "'desc' | 'asc'" }, + cursor: { type: 'string', description: 'Pagination cursor' }, + logId: { type: 'string', description: 'Log entry ID (get_log operation)' }, + executionIdLookup: { + type: 'string', + description: 'Execution ID (get_execution operation)', + }, + }, + outputs: { + logs: { type: 'json', description: 'Array of log summary entries (query operation)' }, + nextCursor: { + type: 'string', + description: 'Cursor for next page; null when no more results (query operation)', + }, + log: { type: 'json', description: 'Full log entry (get_log operation)' }, + executionId: { type: 'string', description: 'Execution ID (get_execution operation)' }, + workflowId: { type: 'string', description: 'Workflow ID (get_execution operation)' }, + workflowState: { + type: 'json', + description: 'Per-block state snapshot (get_execution operation)', 
+ }, + childWorkflowSnapshots: { + type: 'json', + description: 'Snapshots for child workflows (get_execution operation)', + }, + executionMetadata: { + type: 'json', + description: 'Trigger, timestamps, totalDurationMs, cost (get_execution operation)', + }, + }, +} diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index e7ca943af3c..aacf6d49431 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -113,6 +113,7 @@ import { LemlistBlock } from '@/blocks/blocks/lemlist' import { LinearBlock, LinearV2Block } from '@/blocks/blocks/linear' import { LinkedInBlock } from '@/blocks/blocks/linkedin' import { LinkupBlock } from '@/blocks/blocks/linkup' +import { LogsBlock } from '@/blocks/blocks/logs' import { LoopsBlock } from '@/blocks/blocks/loops' import { LumaBlock } from '@/blocks/blocks/luma' import { MailchimpBlock } from '@/blocks/blocks/mailchimp' @@ -361,6 +362,7 @@ export const registry: Record = { linear_v2: LinearV2Block, linkedin: LinkedInBlock, linkup: LinkupBlock, + logs: LogsBlock, loops: LoopsBlock, luma: LumaBlock, mailchimp: MailchimpBlock, diff --git a/apps/sim/tools/logs/get_execution.ts b/apps/sim/tools/logs/get_execution.ts new file mode 100644 index 00000000000..a62eef0525b --- /dev/null +++ b/apps/sim/tools/logs/get_execution.ts @@ -0,0 +1,53 @@ +import type { LogsGetExecutionParams, LogsGetExecutionResponse } from '@/tools/logs/types' +import type { ToolConfig } from '@/tools/types' + +export const logsGetExecutionTool: ToolConfig = { + id: 'logs_get_execution', + name: 'Get Execution Details', + description: + 'Fetch full execution details for a workflow run, including the per-block state snapshot.', + version: '1.0.0', + + params: { + executionId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Execution ID returned by a workflow run', + }, + }, + + request: { + url: (params) => `/api/logs/execution/${encodeURIComponent(params.executionId)}`, + method: 'GET', + 
headers: () => ({ + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response): Promise => { + const data = await response.json() + if (!response.ok) { + throw new Error(data?.error || `Request failed with status ${response.status}`) + } + return { + success: true, + output: data, + } + }, + + outputs: { + executionId: { type: 'string', description: 'Execution ID' }, + workflowId: { type: 'string', description: 'Workflow ID this execution belongs to' }, + workflowState: { type: 'json', description: 'Per-block state snapshot for the execution' }, + childWorkflowSnapshots: { + type: 'json', + description: 'Snapshots for any child workflows invoked during the run', + optional: true, + }, + executionMetadata: { + type: 'json', + description: 'Trigger, timestamps, totalDurationMs, and cost for the run', + }, + }, +} diff --git a/apps/sim/tools/logs/get_log.ts b/apps/sim/tools/logs/get_log.ts new file mode 100644 index 00000000000..92e41e79b83 --- /dev/null +++ b/apps/sim/tools/logs/get_log.ts @@ -0,0 +1,50 @@ +import type { LogsGetParams, LogsGetResponse } from '@/tools/logs/types' +import type { ToolConfig } from '@/tools/types' + +export const logsGetTool: ToolConfig = { + id: 'logs_get', + name: 'Get Log by ID', + description: 'Fetch a single workflow execution log entry by its log ID.', + version: '1.0.0', + + params: { + id: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Log entry ID', + }, + }, + + request: { + url: (params) => { + const workspaceId = params._context?.workspaceId + if (!workspaceId) { + throw new Error('workspaceId is required in execution context') + } + const qs = new URLSearchParams({ workspaceId }) + return `/api/logs/${encodeURIComponent(params.id)}?${qs.toString()}` + }, + method: 'GET', + headers: () => ({ + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response): Promise => { + const result = await response.json() + if (!response.ok) { + throw 
new Error(result?.error || `Request failed with status ${response.status}`) + } + return { + success: true, + output: { + log: result.data, + }, + } + }, + + outputs: { + log: { type: 'json', description: 'Workflow execution log entry' }, + }, +} diff --git a/apps/sim/tools/logs/index.ts b/apps/sim/tools/logs/index.ts new file mode 100644 index 00000000000..109d223c8b8 --- /dev/null +++ b/apps/sim/tools/logs/index.ts @@ -0,0 +1,3 @@ +export { logsGetExecutionTool } from '@/tools/logs/get_execution' +export { logsGetTool } from '@/tools/logs/get_log' +export { logsQueryTool } from '@/tools/logs/query' diff --git a/apps/sim/tools/logs/query.ts b/apps/sim/tools/logs/query.ts new file mode 100644 index 00000000000..8ea660ee29a --- /dev/null +++ b/apps/sim/tools/logs/query.ts @@ -0,0 +1,132 @@ +import type { LogsQueryParams, LogsQueryResponse } from '@/tools/logs/types' +import type { ToolConfig } from '@/tools/types' + +export const logsQueryTool: ToolConfig = { + id: 'logs_query', + name: 'Query Logs', + description: 'Query workflow execution logs in the current workspace with filters.', + version: '1.0.0', + + params: { + workflowIds: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated workflow IDs to filter by', + }, + executionId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter logs to a single execution ID', + }, + level: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + "Log level filter: 'all', 'info', 'error', 'running', 'pending'. 
Comma-separated for multiple.", + }, + triggers: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated triggers (api, webhook, schedule, manual, chat, mothership)', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Max logs to return (default 100, max 200)', + }, + cursor: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Opaque pagination cursor returned by a previous query', + }, + sortBy: { + type: 'string', + required: false, + visibility: 'user-only', + description: "Sort field: 'date' (default), 'duration', 'cost', 'status'", + }, + sortOrder: { + type: 'string', + required: false, + visibility: 'user-only', + description: "Sort order: 'desc' (default) or 'asc'", + }, + startDate: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'ISO 8601 timestamp; only logs at or after this time', + }, + endDate: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'ISO 8601 timestamp; only logs at or before this time', + }, + search: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Free-text search across log fields', + }, + }, + + request: { + url: (params) => { + const workspaceId = params._context?.workspaceId + if (!workspaceId) { + throw new Error('workspaceId is required in execution context') + } + const qs = new URLSearchParams({ workspaceId }) + if (params.workflowIds) qs.set('workflowIds', params.workflowIds) + if (params.executionId) qs.set('executionId', params.executionId) + if (params.level && params.level !== 'all') qs.set('level', params.level) + if (params.triggers) qs.set('triggers', params.triggers) + if (params.startDate) qs.set('startDate', params.startDate) + if (params.endDate) qs.set('endDate', params.endDate) + if (params.search) qs.set('search', params.search) + if (params.cursor) qs.set('cursor', params.cursor) + if 
(params.sortBy) qs.set('sortBy', params.sortBy) + if (params.sortOrder) qs.set('sortOrder', params.sortOrder) + if (params.limit !== undefined && params.limit !== null) { + qs.set('limit', String(params.limit)) + } + return `/api/logs?${qs.toString()}` + }, + method: 'GET', + headers: () => ({ + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response): Promise => { + const result = await response.json() + if (!response.ok) { + throw new Error(result?.error || `Request failed with status ${response.status}`) + } + return { + success: true, + output: { + logs: result.data || [], + nextCursor: result.nextCursor ?? null, + }, + } + }, + + outputs: { + logs: { + type: 'array', + description: 'Array of workflow execution log entries', + }, + nextCursor: { + type: 'string', + description: 'Pagination cursor for the next page; null when no more results', + }, + }, +} diff --git a/apps/sim/tools/logs/types.ts b/apps/sim/tools/logs/types.ts new file mode 100644 index 00000000000..3053059b1f1 --- /dev/null +++ b/apps/sim/tools/logs/types.ts @@ -0,0 +1,48 @@ +import type { + ExecutionSnapshotData, + WorkflowLogDetail, + WorkflowLogSummary, +} from '@/lib/api/contracts/logs' +import type { ToolResponse, WorkflowToolExecutionContext } from '@/tools/types' + +export interface LogsQueryParams { + workflowIds?: string + executionId?: string + level?: string + triggers?: string + limit?: number + cursor?: string + sortBy?: 'date' | 'duration' | 'cost' | 'status' + sortOrder?: 'asc' | 'desc' + startDate?: string + endDate?: string + search?: string + _context?: WorkflowToolExecutionContext +} + +export interface LogsGetParams { + id: string + _context?: WorkflowToolExecutionContext +} + +export interface LogsGetExecutionParams { + executionId: string + _context?: WorkflowToolExecutionContext +} + +export interface LogsQueryResponse extends ToolResponse { + output: { + logs: WorkflowLogSummary[] + nextCursor: string | null + } +} + +export interface 
LogsGetResponse extends ToolResponse { + output: { + log: WorkflowLogDetail + } +} + +export interface LogsGetExecutionResponse extends ToolResponse { + output: ExecutionSnapshotData +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index ad7dd384867..9130ac52dee 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -1568,6 +1568,7 @@ import { import { linkedInGetProfileTool, linkedInSharePostTool } from '@/tools/linkedin' import { linkupSearchTool } from '@/tools/linkup' import { llmChatTool } from '@/tools/llm' +import { logsGetExecutionTool, logsGetTool, logsQueryTool } from '@/tools/logs' import { loopsCreateContactPropertyTool, loopsCreateContactTool, @@ -3204,6 +3205,9 @@ export const tools: Record = { ketch_set_consent: ketchSetConsentTool, ketch_set_subscriptions: ketchSetSubscriptionsTool, linkup_search: linkupSearchTool, + logs_query: logsQueryTool, + logs_get: logsGetTool, + logs_get_execution: logsGetExecutionTool, loops_create_contact: loopsCreateContactTool, loops_create_contact_property: loopsCreateContactPropertyTool, loops_update_contact: loopsUpdateContactTool, From 51addc57673ed02165e3e0590c8e7c69f938809d Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 20:23:39 -0700 Subject: [PATCH 14/15] fix(terminal): use wall-clock duration for loop iterations with concurrent children (#4443) --- .../components/terminal/utils.test.ts | 306 ++++++++++++++++++ .../[workflowId]/components/terminal/utils.ts | 8 +- 2 files changed, 308 insertions(+), 6 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.test.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.test.ts index e7677a608a7..ee07dba42dd 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.test.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.test.ts @@ -482,6 +482,312 @@ 
describe('groupEntriesByExecution', () => { }) }) +describe('duration computation', () => { + /** + * Regression guard for the 18m → 20m → 22m bug. + * + * When a loop iteration contains a parallel block, the iteration's displayed + * duration must be wall-clock (max(endedAt) − min(startedAt)), not the sum of + * child durationMs. Summing over concurrent parallel branches over-counts time + * and causes the displayed iteration duration to climb rapidly as each branch + * resolves. + */ + it('loop iteration with concurrent parallel branches uses wall-clock duration', () => { + const branches = 5 + const branchDurationMs = 110_000 + const loopIterStartMs = Date.UTC(2025, 0, 1, 0, 0, 0) + const loopIterEndMs = loopIterStartMs + branchDurationMs + + const entries: ConsoleEntry[] = [] + for (let branch = 0; branch < branches; branch++) { + entries.push( + makeEntry({ + blockId: 'function-1', + blockName: 'Function 1', + executionOrder: branch + 1, + startedAt: new Date(loopIterStartMs).toISOString(), + endedAt: new Date(loopIterEndMs).toISOString(), + durationMs: branchDurationMs, + iterationType: 'parallel', + iterationCurrent: branch, + iterationTotal: branches, + iterationContainerId: 'parallel-1', + parentIterations: [ + { + iterationType: 'loop', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'loop-1', + }, + ], + }) + ) + } + + const tree = buildEntryTree(entries) + const loopSubflow = tree.find((n) => n.entry.blockType === 'loop') + expect(loopSubflow).toBeDefined() + + const iteration = loopSubflow!.children[0] + expect(iteration.nodeType).toBe('iteration') + expect(iteration.entry.durationMs).toBe(branchDurationMs) + expect(iteration.entry.durationMs).toBeLessThan(branches * branchDurationMs) + }) + + it('subflow container with concurrent children uses wall-clock duration', () => { + const branches = 4 + const branchDurationMs = 60_000 + const startMs = Date.UTC(2025, 0, 1, 0, 0, 0) + const endMs = startMs + branchDurationMs + + const 
entries: ConsoleEntry[] = [] + for (let branch = 0; branch < branches; branch++) { + entries.push( + makeEntry({ + blockId: 'function-1', + executionOrder: branch + 1, + startedAt: new Date(startMs).toISOString(), + endedAt: new Date(endMs).toISOString(), + durationMs: branchDurationMs, + iterationType: 'parallel', + iterationCurrent: branch, + iterationTotal: branches, + iterationContainerId: 'parallel-1', + }) + ) + } + + const tree = buildEntryTree(entries) + const subflow = tree.find((n) => n.entry.blockType === 'parallel') + expect(subflow).toBeDefined() + expect(subflow!.entry.durationMs).toBe(branchDurationMs) + expect(subflow!.entry.durationMs).toBeLessThan(branches * branchDurationMs) + }) + + it('sequential loop iteration uses wall-clock duration', () => { + const blockStart = Date.UTC(2025, 0, 1, 0, 0, 0) + const blockEnd = blockStart + 5_000 + + const entries: ConsoleEntry[] = [ + makeEntry({ + blockId: 'function-1', + executionOrder: 1, + startedAt: new Date(blockStart).toISOString(), + endedAt: new Date(blockEnd).toISOString(), + durationMs: 5_000, + iterationType: 'loop', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'loop-1', + }), + ] + + const tree = buildEntryTree(entries) + const loop = tree.find((n) => n.entry.blockType === 'loop') + expect(loop).toBeDefined() + expect(loop!.children[0].entry.durationMs).toBe(5_000) + }) + + it('parallel iteration uses wall-clock duration', () => { + const start = Date.UTC(2025, 0, 1, 0, 0, 0) + const end = start + 7_500 + + const entries: ConsoleEntry[] = [ + makeEntry({ + blockId: 'function-1', + executionOrder: 1, + startedAt: new Date(start).toISOString(), + endedAt: new Date(end).toISOString(), + durationMs: 7_500, + iterationType: 'parallel', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'parallel-1', + }), + ] + + const tree = buildEntryTree(entries) + const parallel = tree.find((n) => n.entry.blockType === 'parallel') + expect(parallel).toBeDefined() + 
expect(parallel!.children[0].entry.durationMs).toBe(7_500) + }) + + it('sequential loop with gaps between iterations: each iteration is wall-clock of its own children', () => { + const entries: ConsoleEntry[] = [] + const iterStarts = [0, 10_000, 30_000] + const blockDuration = 1_000 + const base = Date.UTC(2025, 0, 1, 0, 0, 0) + + for (let i = 0; i < iterStarts.length; i++) { + entries.push( + makeEntry({ + blockId: 'function-1', + executionOrder: i + 1, + startedAt: new Date(base + iterStarts[i]).toISOString(), + endedAt: new Date(base + iterStarts[i] + blockDuration).toISOString(), + durationMs: blockDuration, + iterationType: 'loop', + iterationCurrent: i, + iterationTotal: 3, + iterationContainerId: 'loop-1', + }) + ) + } + + const tree = buildEntryTree(entries) + const loop = tree.find((n) => n.entry.blockType === 'loop')! + for (let i = 0; i < 3; i++) { + expect(loop.children[i].entry.durationMs).toBe(blockDuration) + } + expect(loop.entry.durationMs).toBe(iterStarts[2] + blockDuration - iterStarts[0]) + }) + + it('loop-in-loop: outer iteration duration spans all inner iterations wall-clock', () => { + const entries: ConsoleEntry[] = [] + const base = Date.UTC(2025, 0, 1, 0, 0, 0) + const innerDuration = 2_000 + const innerCount = 3 + + for (let inner = 0; inner < innerCount; inner++) { + const start = base + inner * innerDuration + entries.push( + makeEntry({ + blockId: 'function-1', + executionOrder: inner + 1, + startedAt: new Date(start).toISOString(), + endedAt: new Date(start + innerDuration).toISOString(), + durationMs: innerDuration, + iterationType: 'loop', + iterationCurrent: inner, + iterationTotal: innerCount, + iterationContainerId: 'inner-loop', + parentIterations: [ + { + iterationType: 'loop', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'outer-loop', + }, + ], + }) + ) + } + + const tree = buildEntryTree(entries) + const outerLoop = tree.find((n) => n.entry.blockType === 'loop')! 
+ const outerIter = outerLoop.children[0] + expect(outerIter.entry.durationMs).toBe(innerCount * innerDuration) + }) + + it('loop-in-parallel: each branch duration reflects its own loop wall-clock', () => { + const entries: ConsoleEntry[] = [] + const base = Date.UTC(2025, 0, 1, 0, 0, 0) + const innerDuration = 1_500 + const innerCount = 2 + const branches = 3 + + for (let branch = 0; branch < branches; branch++) { + for (let inner = 0; inner < innerCount; inner++) { + const start = base + inner * innerDuration + entries.push( + makeEntry({ + blockId: 'function-1', + executionOrder: branch * innerCount + inner + 1, + startedAt: new Date(start).toISOString(), + endedAt: new Date(start + innerDuration).toISOString(), + durationMs: innerDuration, + iterationType: 'loop', + iterationCurrent: inner, + iterationTotal: innerCount, + iterationContainerId: 'inner-loop', + parentIterations: [ + { + iterationType: 'parallel', + iterationCurrent: branch, + iterationTotal: branches, + iterationContainerId: 'parallel-1', + }, + ], + }) + ) + } + } + + const tree = buildEntryTree(entries) + const parallelSubflow = tree.find((n) => n.entry.blockType === 'parallel')! 
+ expect(parallelSubflow.children).toHaveLength(branches) + for (let branch = 0; branch < branches; branch++) { + const branchNode = parallelSubflow.children[branch] + expect(branchNode.entry.durationMs).toBe(innerCount * innerDuration) + } + expect(parallelSubflow.entry.durationMs).toBe(innerCount * innerDuration) + }) + + it('single-block iteration: duration equals the block durationMs', () => { + const start = Date.UTC(2025, 0, 1, 0, 0, 0) + const blockDuration = 3_141 + + const entries: ConsoleEntry[] = [ + makeEntry({ + blockId: 'function-1', + executionOrder: 1, + startedAt: new Date(start).toISOString(), + endedAt: new Date(start + blockDuration).toISOString(), + durationMs: blockDuration, + iterationType: 'loop', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'loop-1', + }), + ] + + const tree = buildEntryTree(entries) + const loop = tree.find((n) => n.entry.blockType === 'loop')! + expect(loop.children[0].entry.durationMs).toBe(blockDuration) + expect(loop.entry.durationMs).toBe(blockDuration) + }) + + it('does not sum concurrent branch durations into iteration duration', () => { + const branches = 20 + const branchDurationMs = 100_000 + const start = Date.UTC(2025, 0, 1, 0, 0, 0) + + const entries: ConsoleEntry[] = [] + for (let branch = 0; branch < branches; branch++) { + const branchStart = start + branch * 5 + entries.push( + makeEntry({ + blockId: 'function-1', + executionOrder: branch + 1, + startedAt: new Date(branchStart).toISOString(), + endedAt: new Date(branchStart + branchDurationMs).toISOString(), + durationMs: branchDurationMs, + iterationType: 'parallel', + iterationCurrent: branch, + iterationTotal: branches, + iterationContainerId: 'parallel-1', + parentIterations: [ + { + iterationType: 'loop', + iterationCurrent: 0, + iterationTotal: 1, + iterationContainerId: 'loop-1', + }, + ], + }) + ) + } + + const tree = buildEntryTree(entries) + const loopSubflow = tree.find((n) => n.entry.blockType === 'loop')! 
+ const iteration = loopSubflow.children[0] + + const wallClock = branchDurationMs + (branches - 1) * 5 + expect(iteration.entry.durationMs).toBe(wallClock) + expect(iteration.entry.durationMs).toBeLessThan(branches * branchDurationMs) + }) +}) + describe('flattenVisibleExecutionRows', () => { it('only includes children for expanded nodes', () => { const childBlock = makeEntry({ diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.ts index e4c450d9c7d..347a5ffbc2f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils.ts @@ -393,9 +393,7 @@ export function buildEntryTree(entries: ConsoleEntry[], idPrefix = ''): EntryNod const subflowEndMs = Math.max( ...allRelevantBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime()) ) - const totalDuration = allRelevantBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0) - const subflowDuration = - iterationType === 'parallel' ? subflowEndMs - subflowStartMs : totalDuration + const subflowDuration = subflowEndMs - subflowStartMs const subflowExecutionOrder = Math.min(...allRelevantBlocks.map((b) => b.executionOrder)) const metadataSource = allRelevantBlocks[0] @@ -449,9 +447,7 @@ export function buildEntryTree(entries: ConsoleEntry[], idPrefix = ''): EntryNod const iterEndMs = Math.max( ...allIterEntries.map((b) => new Date(b.endedAt || b.timestamp).getTime()) ) - const iterDuration = allIterEntries.reduce((sum, b) => sum + (b.durationMs || 0), 0) - const iterDisplayDuration = - iterationType === 'parallel' ? 
iterEndMs - iterStartMs : iterDuration + const iterDisplayDuration = iterEndMs - iterStartMs const iterExecutionOrder = Math.min(...allIterEntries.map((b) => b.executionOrder)) const iterMetadataSource = allIterEntries[0] From e14a3a5fa9f389198f31860040e7e0a70a4443f3 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 4 May 2026 20:56:18 -0700 Subject: [PATCH 15/15] fix(tables): suppress phantom rows on sort, center gutter numbers, stop select-all viewport jump (#4445) * fix(tables): suppress phantom rows on sort, center gutter numbers, stop select-all viewport jump * fix(tables): suppress scroll on Ctrl+A select-all Cmd/Ctrl+A duplicates the select-all logic but missed the suppressFocusScrollRef flag, so the keyboard path still triggered the viewport jump. Co-Authored-By: Claude Opus 4.7 --------- Co-authored-by: Claude Opus 4.7 --- .../[tableId]/components/table/table.tsx | 66 +++++++++++-------- 1 file changed, 40 insertions(+), 26 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx index 6d624c8dd7d..08ba169d1e1 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx @@ -173,6 +173,7 @@ export function Table({ const containerRef = useRef(null) const scrollRef = useRef(null) const isDraggingRef = useRef(false) + const suppressFocusScrollRef = useRef(false) const { tableData, @@ -796,6 +797,7 @@ export function Table({ if (rws.length === 0 || currentCols.length === 0) return setEditingCell(null) setCheckedRows((prev) => (prev.size === 0 ? 
prev : EMPTY_CHECKED_ROWS)) + suppressFocusScrollRef.current = true setSelectionAnchor({ rowIndex: 0, colIndex: 0 }) setSelectionFocus({ rowIndex: maxPositionRef.current, @@ -1155,6 +1157,10 @@ export function Table({ useEffect(() => { if (isColumnSelection) return + if (suppressFocusScrollRef.current) { + suppressFocusScrollRef.current = false + return + } const target = selectionFocus ?? selectionAnchor if (!target) return const { rowIndex, colIndex } = target @@ -1296,6 +1302,7 @@ export function Table({ const rws = rowsRef.current const currentCols = columnsRef.current if (rws.length > 0 && currentCols.length > 0) { + suppressFocusScrollRef.current = true setEditingCell(null) setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS)) setSelectionAnchor({ rowIndex: 0, colIndex: 0 }) @@ -2685,7 +2692,10 @@ export function Table({ <> {rows.map((row, index) => { const prevPosition = index > 0 ? rows[index - 1].position : -1 - const gapCount = queryOptions.filter ? 0 : row.position - prevPosition - 1 + const gapCount = + queryOptions.filter || queryOptions.sort + ? 0 + : row.position - prevPosition - 1 return ( {gapCount > 0 && ( @@ -2938,7 +2948,7 @@ export function Table({ } const GAP_ROW_LIMIT = 200 -const GAP_CHECKBOX_CLASS = cn(CELL_CHECKBOX, 'group/checkbox cursor-pointer text-center') +const GAP_CHECKBOX_CLASS = cn(CELL_CHECKBOX, 'cursor-pointer') interface PositionGapRowsProps { count: number @@ -2975,28 +2985,32 @@ const PositionGapRows = React.memo( const isGapChecked = checkedRows.has(position) return ( - { - if (e.button !== 0) return - onRowToggle(position, e.shiftKey) - }} - > - - {position + 1} - -
- + +
+
{ + if (e.button !== 0) return + onRowToggle(position, e.shiftKey) + }} + > + + {position + 1} + +
+ +
+
{columns.map((col, colIndex) => { @@ -3238,7 +3252,7 @@ const DataRow = React.memo(function DataRow({ return ( onContextMenu(e, row)}> -
+
{ @@ -3268,7 +3282,7 @@ const DataRow = React.memo(function DataRow({ type='button' aria-label={runningCount > 0 ? `Stop ${runningCount} running` : 'Run row'} title={runningCount > 0 ? `Stop ${runningCount} running` : 'Run row'} - className='flex h-[20px] w-[20px] shrink-0 items-center justify-center rounded text-[var(--text-primary)] transition-colors hover-hover:bg-[var(--surface-2)]' + className='ml-auto flex h-[20px] w-[20px] shrink-0 items-center justify-center rounded text-[var(--text-primary)] transition-colors hover-hover:bg-[var(--surface-2)]' onClick={() => { if (runningCount > 0) { onStopRow(row.id)