diff --git a/apps/sim/app/api/chat/manage/[id]/route.test.ts b/apps/sim/app/api/chat/manage/[id]/route.test.ts index 48e65146db5..5808f2cbb58 100644 --- a/apps/sim/app/api/chat/manage/[id]/route.test.ts +++ b/apps/sim/app/api/chat/manage/[id]/route.test.ts @@ -15,12 +15,12 @@ const { mockLimit, mockUpdate, mockSet, - mockDelete, mockCreateSuccessResponse, mockCreateErrorResponse, mockEncryptSecret, mockCheckChatAccess, mockDeployWorkflow, + mockPerformChatUndeploy, mockLogger, } = vi.hoisted(() => { const logger = { @@ -40,12 +40,12 @@ const { mockLimit: vi.fn(), mockUpdate: vi.fn(), mockSet: vi.fn(), - mockDelete: vi.fn(), mockCreateSuccessResponse: vi.fn(), mockCreateErrorResponse: vi.fn(), mockEncryptSecret: vi.fn(), mockCheckChatAccess: vi.fn(), mockDeployWorkflow: vi.fn(), + mockPerformChatUndeploy: vi.fn(), mockLogger: logger, } }) @@ -66,7 +66,6 @@ vi.mock('@sim/db', () => ({ db: { select: mockSelect, update: mockUpdate, - delete: mockDelete, }, })) vi.mock('@sim/db/schema', () => ({ @@ -88,6 +87,9 @@ vi.mock('@/app/api/chat/utils', () => ({ vi.mock('@/lib/workflows/persistence/utils', () => ({ deployWorkflow: mockDeployWorkflow, })) +vi.mock('@/lib/workflows/orchestration', () => ({ + performChatUndeploy: mockPerformChatUndeploy, +})) vi.mock('drizzle-orm', () => ({ and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })), eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })), @@ -106,7 +108,7 @@ describe('Chat Edit API Route', () => { mockWhere.mockReturnValue({ limit: mockLimit }) mockUpdate.mockReturnValue({ set: mockSet }) mockSet.mockReturnValue({ where: mockWhere }) - mockDelete.mockReturnValue({ where: mockWhere }) + mockPerformChatUndeploy.mockResolvedValue({ success: true }) mockCreateSuccessResponse.mockImplementation((data) => { return new Response(JSON.stringify(data), { @@ -428,7 +430,11 @@ describe('Chat Edit API Route', () => { const response = await DELETE(req, { params: Promise.resolve({ id: 'chat-123' 
}) }) expect(response.status).toBe(200) - expect(mockDelete).toHaveBeenCalled() + expect(mockPerformChatUndeploy).toHaveBeenCalledWith({ + chatId: 'chat-123', + userId: 'user-id', + workspaceId: 'workspace-123', + }) const data = await response.json() expect(data.message).toBe('Chat deployment deleted successfully') }) @@ -451,7 +457,11 @@ describe('Chat Edit API Route', () => { expect(response.status).toBe(200) expect(mockCheckChatAccess).toHaveBeenCalledWith('chat-123', 'admin-user-id') - expect(mockDelete).toHaveBeenCalled() + expect(mockPerformChatUndeploy).toHaveBeenCalledWith({ + chatId: 'chat-123', + userId: 'admin-user-id', + workspaceId: 'workspace-123', + }) }) }) }) diff --git a/apps/sim/app/api/chat/manage/[id]/route.ts b/apps/sim/app/api/chat/manage/[id]/route.ts index 1585d380057..c09688c99d6 100644 --- a/apps/sim/app/api/chat/manage/[id]/route.ts +++ b/apps/sim/app/api/chat/manage/[id]/route.ts @@ -9,6 +9,7 @@ import { getSession } from '@/lib/auth' import { isDev } from '@/lib/core/config/feature-flags' import { encryptSecret } from '@/lib/core/security/encryption' import { getEmailDomain } from '@/lib/core/utils/urls' +import { performChatUndeploy } from '@/lib/workflows/orchestration' import { deployWorkflow } from '@/lib/workflows/persistence/utils' import { checkChatAccess } from '@/app/api/chat/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' @@ -270,33 +271,25 @@ export async function DELETE( return createErrorResponse('Unauthorized', 401) } - const { - hasAccess, - chat: chatRecord, - workspaceId: chatWorkspaceId, - } = await checkChatAccess(chatId, session.user.id) + const { hasAccess, workspaceId: chatWorkspaceId } = await checkChatAccess( + chatId, + session.user.id + ) if (!hasAccess) { return createErrorResponse('Chat not found or access denied', 404) } - await db.delete(chat).where(eq(chat.id, chatId)) - - logger.info(`Chat "${chatId}" deleted successfully`) - - recordAudit({ - workspaceId: 
chatWorkspaceId || null, - actorId: session.user.id, - actorName: session.user.name, - actorEmail: session.user.email, - action: AuditAction.CHAT_DELETED, - resourceType: AuditResourceType.CHAT, - resourceId: chatId, - resourceName: chatRecord?.title || chatId, - description: `Deleted chat deployment "${chatRecord?.title || chatId}"`, - request: _request, + const result = await performChatUndeploy({ + chatId, + userId: session.user.id, + workspaceId: chatWorkspaceId, }) + if (!result.success) { + return createErrorResponse(result.error || 'Failed to delete chat', 500) + } + return createSuccessResponse({ message: 'Chat deployment deleted successfully', }) diff --git a/apps/sim/app/api/chat/route.test.ts b/apps/sim/app/api/chat/route.test.ts index d590d6e506b..47840515e98 100644 --- a/apps/sim/app/api/chat/route.test.ts +++ b/apps/sim/app/api/chat/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { auditMock, createEnvMock } from '@sim/testing' +import { createEnvMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -12,66 +12,51 @@ const { mockFrom, mockWhere, mockLimit, - mockInsert, - mockValues, - mockReturning, mockCreateSuccessResponse, mockCreateErrorResponse, - mockEncryptSecret, mockCheckWorkflowAccessForChatCreation, - mockDeployWorkflow, + mockPerformChatDeploy, mockGetSession, - mockUuidV4, } = vi.hoisted(() => ({ mockSelect: vi.fn(), mockFrom: vi.fn(), mockWhere: vi.fn(), mockLimit: vi.fn(), - mockInsert: vi.fn(), - mockValues: vi.fn(), - mockReturning: vi.fn(), mockCreateSuccessResponse: vi.fn(), mockCreateErrorResponse: vi.fn(), - mockEncryptSecret: vi.fn(), mockCheckWorkflowAccessForChatCreation: vi.fn(), - mockDeployWorkflow: vi.fn(), + mockPerformChatDeploy: vi.fn(), mockGetSession: vi.fn(), - mockUuidV4: vi.fn(), })) -vi.mock('@/lib/audit/log', () => auditMock) - vi.mock('@sim/db', () => ({ db: { select: mockSelect, - insert: mockInsert, 
}, })) vi.mock('@sim/db/schema', () => ({ - chat: { userId: 'userId', identifier: 'identifier' }, + chat: { userId: 'userId', identifier: 'identifier', archivedAt: 'archivedAt' }, workflow: { id: 'id', userId: 'userId', isDeployed: 'isDeployed' }, })) +vi.mock('drizzle-orm', () => ({ + and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })), + eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })), + isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })), +})) + vi.mock('@/app/api/workflows/utils', () => ({ createSuccessResponse: mockCreateSuccessResponse, createErrorResponse: mockCreateErrorResponse, })) -vi.mock('@/lib/core/security/encryption', () => ({ - encryptSecret: mockEncryptSecret, -})) - -vi.mock('uuid', () => ({ - v4: mockUuidV4, -})) - vi.mock('@/app/api/chat/utils', () => ({ checkWorkflowAccessForChatCreation: mockCheckWorkflowAccessForChatCreation, })) -vi.mock('@/lib/workflows/persistence/utils', () => ({ - deployWorkflow: mockDeployWorkflow, +vi.mock('@/lib/workflows/orchestration', () => ({ + performChatDeploy: mockPerformChatDeploy, })) vi.mock('@/lib/auth', () => ({ @@ -94,10 +79,6 @@ describe('Chat API Route', () => { mockSelect.mockReturnValue({ from: mockFrom }) mockFrom.mockReturnValue({ where: mockWhere }) mockWhere.mockReturnValue({ limit: mockLimit }) - mockInsert.mockReturnValue({ values: mockValues }) - mockValues.mockReturnValue({ returning: mockReturning }) - - mockUuidV4.mockReturnValue('test-uuid') mockCreateSuccessResponse.mockImplementation((data) => { return new Response(JSON.stringify(data), { @@ -113,12 +94,10 @@ describe('Chat API Route', () => { }) }) - mockEncryptSecret.mockResolvedValue({ encrypted: 'encrypted-password' }) - - mockDeployWorkflow.mockResolvedValue({ + mockPerformChatDeploy.mockResolvedValue({ success: true, - version: 1, - deployedAt: new Date(), + chatId: 'test-uuid', + chatUrl: 'http://localhost:3000/chat/test-chat', }) }) @@ -277,7 +256,6 @@ describe('Chat 
API Route', () => { hasAccess: true, workflow: { userId: 'user-id', workspaceId: null, isDeployed: true }, }) - mockReturning.mockResolvedValue([{ id: 'test-uuid' }]) const req = new NextRequest('http://localhost:3000/api/chat', { method: 'POST', @@ -287,6 +265,13 @@ describe('Chat API Route', () => { expect(response.status).toBe(200) expect(mockCheckWorkflowAccessForChatCreation).toHaveBeenCalledWith('workflow-123', 'user-id') + expect(mockPerformChatDeploy).toHaveBeenCalledWith( + expect.objectContaining({ + workflowId: 'workflow-123', + userId: 'user-id', + identifier: 'test-chat', + }) + ) }) it('should allow chat deployment when user has workspace admin permission', async () => { @@ -309,7 +294,6 @@ describe('Chat API Route', () => { hasAccess: true, workflow: { userId: 'other-user-id', workspaceId: 'workspace-123', isDeployed: true }, }) - mockReturning.mockResolvedValue([{ id: 'test-uuid' }]) const req = new NextRequest('http://localhost:3000/api/chat', { method: 'POST', @@ -319,6 +303,12 @@ describe('Chat API Route', () => { expect(response.status).toBe(200) expect(mockCheckWorkflowAccessForChatCreation).toHaveBeenCalledWith('workflow-123', 'user-id') + expect(mockPerformChatDeploy).toHaveBeenCalledWith( + expect.objectContaining({ + workflowId: 'workflow-123', + workspaceId: 'workspace-123', + }) + ) }) it('should reject when workflow is in workspace but user lacks admin permission', async () => { @@ -383,7 +373,7 @@ describe('Chat API Route', () => { expect(mockCheckWorkflowAccessForChatCreation).toHaveBeenCalledWith('workflow-123', 'user-id') }) - it('should auto-deploy workflow if not already deployed', async () => { + it('should call performChatDeploy for undeployed workflow', async () => { mockGetSession.mockResolvedValue({ user: { id: 'user-id', email: 'user@example.com' }, }) @@ -403,7 +393,6 @@ describe('Chat API Route', () => { hasAccess: true, workflow: { userId: 'user-id', workspaceId: null, isDeployed: false }, }) - 
mockReturning.mockResolvedValue([{ id: 'test-uuid' }]) const req = new NextRequest('http://localhost:3000/api/chat', { method: 'POST', @@ -412,10 +401,12 @@ describe('Chat API Route', () => { const response = await POST(req) expect(response.status).toBe(200) - expect(mockDeployWorkflow).toHaveBeenCalledWith({ - workflowId: 'workflow-123', - deployedBy: 'user-id', - }) + expect(mockPerformChatDeploy).toHaveBeenCalledWith( + expect.objectContaining({ + workflowId: 'workflow-123', + userId: 'user-id', + }) + ) }) }) }) diff --git a/apps/sim/app/api/chat/route.ts b/apps/sim/app/api/chat/route.ts index b7233238017..c9528715d9d 100644 --- a/apps/sim/app/api/chat/route.ts +++ b/apps/sim/app/api/chat/route.ts @@ -3,14 +3,9 @@ import { chat } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' -import { v4 as uuidv4 } from 'uuid' import { z } from 'zod' -import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' -import { isDev } from '@/lib/core/config/feature-flags' -import { encryptSecret } from '@/lib/core/security/encryption' -import { getBaseUrl } from '@/lib/core/utils/urls' -import { deployWorkflow } from '@/lib/workflows/persistence/utils' +import { performChatDeploy } from '@/lib/workflows/orchestration' import { checkWorkflowAccessForChatCreation } from '@/app/api/chat/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' @@ -109,7 +104,6 @@ export async function POST(request: NextRequest) { ) } - // Check identifier availability and workflow access in parallel const [existingIdentifier, { hasAccess, workflow: workflowRecord }] = await Promise.all([ db .select() @@ -127,121 +121,27 @@ export async function POST(request: NextRequest) { return createErrorResponse('Workflow not found or access denied', 404) } - // Always deploy/redeploy the workflow to ensure 
latest version - const result = await deployWorkflow({ - workflowId, - deployedBy: session.user.id, - }) - - if (!result.success) { - return createErrorResponse(result.error || 'Failed to deploy workflow', 500) - } - - logger.info( - `${workflowRecord.isDeployed ? 'Redeployed' : 'Auto-deployed'} workflow ${workflowId} for chat (v${result.version})` - ) - - // Encrypt password if provided - let encryptedPassword = null - if (authType === 'password' && password) { - const { encrypted } = await encryptSecret(password) - encryptedPassword = encrypted - } - - // Create the chat deployment - const id = uuidv4() - - // Log the values we're inserting - logger.info('Creating chat deployment with values:', { - workflowId, - identifier, - title, - authType, - hasPassword: !!encryptedPassword, - emailCount: allowedEmails?.length || 0, - outputConfigsCount: outputConfigs.length, - }) - - // Merge customizations with the additional fields - const mergedCustomizations = { - ...(customizations || {}), - primaryColor: customizations?.primaryColor || 'var(--brand-hover)', - welcomeMessage: customizations?.welcomeMessage || 'Hi there! How can I help you today?', - } - - await db.insert(chat).values({ - id, + const result = await performChatDeploy({ workflowId, userId: session.user.id, identifier, title, - description: description || null, - customizations: mergedCustomizations, - isActive: true, + description, + customizations, authType, - password: encryptedPassword, - allowedEmails: authType === 'email' || authType === 'sso' ? 
allowedEmails : [], + password, + allowedEmails, outputConfigs, - createdAt: new Date(), - updatedAt: new Date(), + workspaceId: workflowRecord.workspaceId, }) - // Return successful response with chat URL - // Generate chat URL using path-based routing instead of subdomains - const baseUrl = getBaseUrl() - - let chatUrl: string - try { - const url = new URL(baseUrl) - let host = url.host - if (host.startsWith('www.')) { - host = host.substring(4) - } - chatUrl = `${url.protocol}//${host}/chat/${identifier}` - } catch (error) { - logger.warn('Failed to parse baseUrl, falling back to defaults:', { - baseUrl, - error: error instanceof Error ? error.message : 'Unknown error', - }) - // Fallback based on environment - if (isDev) { - chatUrl = `http://localhost:3000/chat/${identifier}` - } else { - chatUrl = `https://sim.ai/chat/${identifier}` - } - } - - logger.info(`Chat "${title}" deployed successfully at ${chatUrl}`) - - try { - const { PlatformEvents } = await import('@/lib/core/telemetry') - PlatformEvents.chatDeployed({ - chatId: id, - workflowId, - authType, - hasOutputConfigs: outputConfigs.length > 0, - }) - } catch (_e) { - // Silently fail + if (!result.success) { + return createErrorResponse(result.error || 'Failed to deploy chat', 500) } - recordAudit({ - workspaceId: workflowRecord.workspaceId || null, - actorId: session.user.id, - actorName: session.user.name, - actorEmail: session.user.email, - action: AuditAction.CHAT_DEPLOYED, - resourceType: AuditResourceType.CHAT, - resourceId: id, - resourceName: title, - description: `Deployed chat "${title}"`, - metadata: { workflowId, identifier, authType }, - request, - }) - return createSuccessResponse({ - id, - chatUrl, + id: result.chatId, + chatUrl: result.chatUrl, message: 'Chat deployment created successfully', }) } catch (validationError) { diff --git a/apps/sim/app/api/folders/[id]/route.test.ts b/apps/sim/app/api/folders/[id]/route.test.ts index ecdcc2c4b5a..ee13e3a6c3f 100644 --- 
a/apps/sim/app/api/folders/[id]/route.test.ts +++ b/apps/sim/app/api/folders/[id]/route.test.ts @@ -6,7 +6,14 @@ import { auditMock, createMockRequest, type MockUser } from '@sim/testing' import { beforeEach, describe, expect, it, vi } from 'vitest' -const { mockGetSession, mockGetUserEntityPermissions, mockLogger, mockDbRef } = vi.hoisted(() => { +const { + mockGetSession, + mockGetUserEntityPermissions, + mockLogger, + mockDbRef, + mockPerformDeleteFolder, + mockCheckForCircularReference, +} = vi.hoisted(() => { const logger = { info: vi.fn(), warn: vi.fn(), @@ -21,6 +28,8 @@ const { mockGetSession, mockGetUserEntityPermissions, mockLogger, mockDbRef } = mockGetUserEntityPermissions: vi.fn(), mockLogger: logger, mockDbRef: { current: null as any }, + mockPerformDeleteFolder: vi.fn(), + mockCheckForCircularReference: vi.fn(), } }) @@ -39,6 +48,12 @@ vi.mock('@sim/db', () => ({ return mockDbRef.current }, })) +vi.mock('@/lib/workflows/orchestration', () => ({ + performDeleteFolder: mockPerformDeleteFolder, +})) +vi.mock('@/lib/workflows/utils', () => ({ + checkForCircularReference: mockCheckForCircularReference, +})) import { DELETE, PUT } from '@/app/api/folders/[id]/route' @@ -144,6 +159,11 @@ describe('Individual Folder API Route', () => { mockGetUserEntityPermissions.mockResolvedValue('admin') mockDbRef.current = createFolderDbMock() + mockPerformDeleteFolder.mockResolvedValue({ + success: true, + deletedItems: { folders: 1, workflows: 0 }, + }) + mockCheckForCircularReference.mockResolvedValue(false) }) describe('PUT /api/folders/[id]', () => { @@ -369,13 +389,17 @@ describe('Individual Folder API Route', () => { it('should prevent circular references when updating parent', async () => { mockAuthenticatedUser() - const circularCheckResults = [{ parentId: 'folder-2' }, { parentId: 'folder-3' }] - mockDbRef.current = createFolderDbMock({ - folderLookupResult: { id: 'folder-3', parentId: null, name: 'Folder 3' }, - circularCheckResults, + folderLookupResult: { + 
id: 'folder-3', + parentId: null, + name: 'Folder 3', + workspaceId: 'workspace-123', + }, }) + mockCheckForCircularReference.mockResolvedValue(true) + const req = createMockRequest('PUT', { name: 'Updated Folder 3', parentId: 'folder-1', @@ -388,6 +412,7 @@ describe('Individual Folder API Route', () => { const data = await response.json() expect(data).toHaveProperty('error', 'Cannot create circular folder reference') + expect(mockCheckForCircularReference).toHaveBeenCalledWith('folder-3', 'folder-1') }) }) @@ -409,6 +434,12 @@ describe('Individual Folder API Route', () => { const data = await response.json() expect(data).toHaveProperty('success', true) expect(data).toHaveProperty('deletedItems') + expect(mockPerformDeleteFolder).toHaveBeenCalledWith({ + folderId: 'folder-1', + workspaceId: 'workspace-123', + userId: TEST_USER.id, + folderName: 'Test Folder', + }) }) it('should return 401 for unauthenticated delete requests', async () => { @@ -472,6 +503,7 @@ describe('Individual Folder API Route', () => { const data = await response.json() expect(data).toHaveProperty('success', true) + expect(mockPerformDeleteFolder).toHaveBeenCalled() }) it('should handle database errors during deletion', async () => { diff --git a/apps/sim/app/api/folders/[id]/route.ts b/apps/sim/app/api/folders/[id]/route.ts index 41b9a6276cb..cc25ecd770e 100644 --- a/apps/sim/app/api/folders/[id]/route.ts +++ b/apps/sim/app/api/folders/[id]/route.ts @@ -1,12 +1,12 @@ import { db } from '@sim/db' -import { workflow, workflowFolder } from '@sim/db/schema' +import { workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, isNull } from 'drizzle-orm' +import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' -import { archiveWorkflowsByIdsInWorkspace } from 
'@/lib/workflows/lifecycle' +import { performDeleteFolder } from '@/lib/workflows/orchestration' +import { checkForCircularReference } from '@/lib/workflows/utils' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('FoldersIDAPI') @@ -130,7 +130,6 @@ export async function DELETE( return NextResponse.json({ error: 'Folder not found' }, { status: 404 }) } - // Check if user has admin permissions for the workspace (admin-only for deletions) const workspacePermission = await getUserEntityPermissions( session.user.id, 'workspace', @@ -144,170 +143,25 @@ export async function DELETE( ) } - // Check if deleting this folder would delete the last workflow(s) in the workspace - const workflowsInFolder = await countWorkflowsInFolderRecursively( - id, - existingFolder.workspaceId - ) - const totalWorkflowsInWorkspace = await db - .select({ id: workflow.id }) - .from(workflow) - .where(and(eq(workflow.workspaceId, existingFolder.workspaceId), isNull(workflow.archivedAt))) - - if (workflowsInFolder > 0 && workflowsInFolder >= totalWorkflowsInWorkspace.length) { - return NextResponse.json( - { error: 'Cannot delete folder containing the only workflow(s) in the workspace' }, - { status: 400 } - ) - } - - // Recursively delete folder and all its contents - const deletionStats = await deleteFolderRecursively(id, existingFolder.workspaceId) - - logger.info('Deleted folder and all contents:', { - id, - deletionStats, - }) - - recordAudit({ + const result = await performDeleteFolder({ + folderId: id, workspaceId: existingFolder.workspaceId, - actorId: session.user.id, - actorName: session.user.name, - actorEmail: session.user.email, - action: AuditAction.FOLDER_DELETED, - resourceType: AuditResourceType.FOLDER, - resourceId: id, - resourceName: existingFolder.name, - description: `Deleted folder "${existingFolder.name}"`, - metadata: { - affected: { - workflows: deletionStats.workflows, - subfolders: deletionStats.folders - 1, - }, 
- }, - request, + userId: session.user.id, + folderName: existingFolder.name, }) + if (!result.success) { + const status = + result.errorCode === 'not_found' ? 404 : result.errorCode === 'validation' ? 400 : 500 + return NextResponse.json({ error: result.error }, { status }) + } + return NextResponse.json({ success: true, - deletedItems: deletionStats, + deletedItems: result.deletedItems, }) } catch (error) { logger.error('Error deleting folder:', { error }) return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) } } - -// Helper function to recursively delete a folder and all its contents -async function deleteFolderRecursively( - folderId: string, - workspaceId: string -): Promise<{ folders: number; workflows: number }> { - const stats = { folders: 0, workflows: 0 } - - // Get all child folders first (workspace-scoped, not user-scoped) - const childFolders = await db - .select({ id: workflowFolder.id }) - .from(workflowFolder) - .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId))) - - // Recursively delete child folders - for (const childFolder of childFolders) { - const childStats = await deleteFolderRecursively(childFolder.id, workspaceId) - stats.folders += childStats.folders - stats.workflows += childStats.workflows - } - - // Delete all workflows in this folder (workspace-scoped, not user-scoped) - // The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows - const workflowsInFolder = await db - .select({ id: workflow.id }) - .from(workflow) - .where( - and( - eq(workflow.folderId, folderId), - eq(workflow.workspaceId, workspaceId), - isNull(workflow.archivedAt) - ) - ) - - if (workflowsInFolder.length > 0) { - await archiveWorkflowsByIdsInWorkspace( - workspaceId, - workflowsInFolder.map((entry) => entry.id), - { requestId: `folder-${folderId}` } - ) - - stats.workflows += workflowsInFolder.length - } - - // Delete this folder - await 
db.delete(workflowFolder).where(eq(workflowFolder.id, folderId)) - - stats.folders += 1 - - return stats -} - -/** - * Counts the number of workflows in a folder and all its subfolders recursively. - */ -async function countWorkflowsInFolderRecursively( - folderId: string, - workspaceId: string -): Promise { - let count = 0 - - const workflowsInFolder = await db - .select({ id: workflow.id }) - .from(workflow) - .where( - and( - eq(workflow.folderId, folderId), - eq(workflow.workspaceId, workspaceId), - isNull(workflow.archivedAt) - ) - ) - - count += workflowsInFolder.length - - const childFolders = await db - .select({ id: workflowFolder.id }) - .from(workflowFolder) - .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId))) - - for (const childFolder of childFolders) { - count += await countWorkflowsInFolderRecursively(childFolder.id, workspaceId) - } - - return count -} - -// Helper function to check for circular references -async function checkForCircularReference(folderId: string, parentId: string): Promise { - let currentParentId: string | null = parentId - const visited = new Set() - - while (currentParentId) { - if (visited.has(currentParentId)) { - return true // Circular reference detected - } - - if (currentParentId === folderId) { - return true // Would create a cycle - } - - visited.add(currentParentId) - - // Get the parent of the current parent - const parent: { parentId: string | null } | undefined = await db - .select({ parentId: workflowFolder.parentId }) - .from(workflowFolder) - .where(eq(workflowFolder.id, currentParentId)) - .then((rows) => rows[0]) - - currentParentId = parent?.parentId || null - } - - return false -} diff --git a/apps/sim/app/api/skills/route.ts b/apps/sim/app/api/skills/route.ts index 224edf44dee..a45539d20cc 100644 --- a/apps/sim/app/api/skills/route.ts +++ b/apps/sim/app/api/skills/route.ts @@ -1,6 +1,7 @@ import { createLogger } from '@sim/logger' import { type NextRequest, 
NextResponse } from 'next/server' import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { deleteSkill, listSkills, upsertSkills } from '@/lib/workflows/skills/operations' @@ -96,6 +97,18 @@ export async function POST(req: NextRequest) { requestId, }) + for (const skill of resultSkills) { + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.SKILL_CREATED, + resourceType: AuditResourceType.SKILL, + resourceId: skill.id, + resourceName: skill.name, + description: `Created/updated skill "${skill.name}"`, + }) + } + return NextResponse.json({ success: true, data: resultSkills }) } catch (validationError) { if (validationError instanceof z.ZodError) { @@ -158,6 +171,15 @@ export async function DELETE(request: NextRequest) { return NextResponse.json({ error: 'Skill not found' }, { status: 404 }) } + recordAudit({ + workspaceId, + actorId: authResult.userId, + action: AuditAction.SKILL_DELETED, + resourceType: AuditResourceType.SKILL, + resourceId: skillId, + description: `Deleted skill`, + }) + logger.info(`[${requestId}] Deleted skill: ${skillId}`) return NextResponse.json({ success: true }) } catch (error) { diff --git a/apps/sim/app/api/tools/custom/route.ts b/apps/sim/app/api/tools/custom/route.ts index 9979a378f37..6bcbf553067 100644 --- a/apps/sim/app/api/tools/custom/route.ts +++ b/apps/sim/app/api/tools/custom/route.ts @@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger' import { and, desc, eq, isNull, or } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { upsertCustomTools } from 
'@/lib/workflows/custom-tools/operations' @@ -166,6 +167,18 @@ export async function POST(req: NextRequest) { requestId, }) + for (const tool of resultTools) { + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.CUSTOM_TOOL_CREATED, + resourceType: AuditResourceType.CUSTOM_TOOL, + resourceId: tool.id, + resourceName: tool.title, + description: `Created/updated custom tool "${tool.title}"`, + }) + } + return NextResponse.json({ success: true, data: resultTools }) } catch (validationError) { if (validationError instanceof z.ZodError) { @@ -265,6 +278,15 @@ export async function DELETE(request: NextRequest) { // Delete the tool await db.delete(customTools).where(eq(customTools.id, toolId)) + recordAudit({ + workspaceId: tool.workspaceId || undefined, + actorId: userId, + action: AuditAction.CUSTOM_TOOL_DELETED, + resourceType: AuditResourceType.CUSTOM_TOOL, + resourceId: toolId, + description: `Deleted custom tool`, + }) + logger.info(`[${requestId}] Deleted tool: ${toolId}`) return NextResponse.json({ success: true }) } catch (error) { diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts index 9a6eeba491b..d6b195fe78f 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts @@ -1,25 +1,7 @@ -import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' -import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' -import { - cleanupWebhooksForWorkflow, - restorePreviousVersionWebhooks, - saveTriggerWebhooksForDeploy, -} from '@/lib/webhooks/deploy' import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' -import { - activateWorkflowVersionById, - deployWorkflow, - loadWorkflowFromNormalizedTables, - 
undeployWorkflow, -} from '@/lib/workflows/persistence/utils' -import { - cleanupDeploymentVersion, - createSchedulesForDeploy, - validateWorkflowSchedules, -} from '@/lib/workflows/schedules' +import { performFullDeploy, performFullUndeploy } from '@/lib/workflows/orchestration' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -31,12 +13,19 @@ import type { AdminDeployResult, AdminUndeployResult } from '@/app/api/v1/admin/ const logger = createLogger('AdminWorkflowDeployAPI') -const ADMIN_ACTOR_ID = 'admin-api' - interface RouteParams { id: string } +/** + * POST — Deploy a workflow via admin API. + * + * `userId` is set to the workflow owner so that webhook credential resolution + * (OAuth token lookups for providers like Airtable, Attio, etc.) uses a real + * user. `actorId` is set to `'admin-api'` so that the `deployedBy` field on + * the deployment version and audit log entries are correctly attributed to an + * admin action rather than the workflow owner. 
+ */ export const POST = withAdminAuthParams(async (request, context) => { const { id: workflowId } = await context.params const requestId = generateRequestId() @@ -48,140 +37,28 @@ export const POST = withAdminAuthParams(async (request, context) => return notFoundResponse('Workflow') } - const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) - if (!normalizedData) { - return badRequestResponse('Workflow has no saved state') - } - - const scheduleValidation = validateWorkflowSchedules(normalizedData.blocks) - if (!scheduleValidation.isValid) { - return badRequestResponse(`Invalid schedule configuration: ${scheduleValidation.error}`) - } - - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - const previousVersionId = currentActiveVersion?.id - - const rollbackDeployment = async () => { - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId: workflowRecord.userId, - previousVersionId, - requestId, - }) - const reactivateResult = await activateWorkflowVersionById({ - workflowId, - deploymentVersionId: previousVersionId, - }) - if (reactivateResult.success) { - return - } - } - - await undeployWorkflow({ workflowId }) - } - - const deployResult = await deployWorkflow({ + const result = await performFullDeploy({ workflowId, - deployedBy: ADMIN_ACTOR_ID, - workflowName: workflowRecord.name, - }) - - if (!deployResult.success) { - return internalErrorResponse(deployResult.error || 'Failed to deploy workflow') - } - - if (!deployResult.deploymentVersionId) { - await undeployWorkflow({ workflowId }) - return internalErrorResponse('Failed to resolve deployment version') - } - - const workflowData = workflowRecord as Record - - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ 
- request, - workflowId, - workflow: workflowData, userId: workflowRecord.userId, - blocks: normalizedData.blocks, + workflowName: workflowRecord.name, requestId, - deploymentVersionId: deployResult.deploymentVersionId, - previousVersionId, + request, + actorId: 'admin-api', }) - if (!triggerSaveResult.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: deployResult.deploymentVersionId, - }) - await rollbackDeployment() - return internalErrorResponse( - triggerSaveResult.error?.message || 'Failed to sync trigger configuration' - ) - } - - const scheduleResult = await createSchedulesForDeploy( - workflowId, - normalizedData.blocks, - db, - deployResult.deploymentVersionId - ) - if (!scheduleResult.success) { - logger.error( - `[${requestId}] Admin API: Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}` - ) - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: deployResult.deploymentVersionId, - }) - await rollbackDeployment() - return internalErrorResponse(scheduleResult.error || 'Failed to create schedule') - } - - if (previousVersionId && previousVersionId !== deployResult.deploymentVersionId) { - try { - logger.info(`[${requestId}] Admin API: Cleaning up previous version ${previousVersionId}`) - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - } catch (cleanupError) { - logger.error( - `[${requestId}] Admin API: Failed to clean up previous version ${previousVersionId}`, - cleanupError - ) - } + if (!result.success) { + if (result.errorCode === 'not_found') return notFoundResponse('Workflow state') + if (result.errorCode === 'validation') return badRequestResponse(result.error!) 
+ return internalErrorResponse(result.error || 'Failed to deploy workflow') } - logger.info( - `[${requestId}] Admin API: Deployed workflow ${workflowId} as v${deployResult.version}` - ) - - // Sync MCP tools with the latest parameter schema - await syncMcpToolsForWorkflow({ workflowId, requestId, context: 'deploy' }) + logger.info(`[${requestId}] Admin API: Deployed workflow ${workflowId} as v${result.version}`) const response: AdminDeployResult = { isDeployed: true, - version: deployResult.version!, - deployedAt: deployResult.deployedAt!.toISOString(), - warnings: triggerSaveResult.warnings, + version: result.version!, + deployedAt: result.deployedAt!.toISOString(), + warnings: result.warnings, } return singleResponse(response) @@ -191,7 +68,7 @@ export const POST = withAdminAuthParams(async (request, context) => } }) -export const DELETE = withAdminAuthParams(async (request, context) => { +export const DELETE = withAdminAuthParams(async (_request, context) => { const { id: workflowId } = await context.params const requestId = generateRequestId() @@ -202,19 +79,17 @@ export const DELETE = withAdminAuthParams(async (request, context) return notFoundResponse('Workflow') } - const result = await undeployWorkflow({ workflowId }) + const result = await performFullUndeploy({ + workflowId, + userId: workflowRecord.userId, + requestId, + actorId: 'admin-api', + }) + if (!result.success) { return internalErrorResponse(result.error || 'Failed to undeploy workflow') } - await cleanupWebhooksForWorkflow( - workflowId, - workflowRecord as Record, - requestId - ) - - await removeMcpToolsForWorkflow(workflowId, requestId) - logger.info(`Admin API: Undeployed workflow ${workflowId}`) const response: AdminUndeployResult = { diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts index ad8644aa49a..927e6fee9d1 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts +++ 
b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts @@ -13,12 +13,12 @@ */ import { db } from '@sim/db' -import { templates, workflowBlocks, workflowEdges } from '@sim/db/schema' +import { workflowBlocks, workflowEdges } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { count, eq } from 'drizzle-orm' import { NextResponse } from 'next/server' import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' -import { archiveWorkflow } from '@/lib/workflows/lifecycle' +import { performDeleteWorkflow } from '@/lib/workflows/orchestration' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { internalErrorResponse, @@ -69,7 +69,7 @@ export const GET = withAdminAuthParams(async (request, context) => } }) -export const DELETE = withAdminAuthParams(async (request, context) => { +export const DELETE = withAdminAuthParams(async (_request, context) => { const { id: workflowId } = await context.params try { @@ -79,12 +79,18 @@ export const DELETE = withAdminAuthParams(async (request, context) return notFoundResponse('Workflow') } - await db.update(templates).set({ workflowId: null }).where(eq(templates.workflowId, workflowId)) - - await archiveWorkflow(workflowId, { + const result = await performDeleteWorkflow({ + workflowId, + userId: workflowData.userId, + skipLastWorkflowGuard: true, requestId: `admin-workflow-${workflowId}`, + actorId: 'admin-api', }) + if (!result.success) { + return internalErrorResponse(result.error || 'Failed to delete workflow') + } + logger.info(`Admin API: Deleted workflow ${workflowId} (${workflowData.name})`) return NextResponse.json({ success: true, workflowId }) diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts index 1824c6508f4..418390592fd 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts +++ 
b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts @@ -1,16 +1,7 @@ -import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' -import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' -import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' -import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' -import { - cleanupDeploymentVersion, - createSchedulesForDeploy, - validateWorkflowSchedules, -} from '@/lib/workflows/schedules' +import { performActivateVersion } from '@/lib/workflows/orchestration' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -18,7 +9,6 @@ import { notFoundResponse, singleResponse, } from '@/app/api/v1/admin/responses' -import type { BlockState } from '@/stores/workflows/workflow/types' const logger = createLogger('AdminWorkflowActivateVersionAPI') @@ -43,144 +33,22 @@ export const POST = withAdminAuthParams(async (request, context) => return badRequestResponse('Invalid version number') } - const [versionRow] = await db - .select({ - id: workflowDeploymentVersion.id, - state: workflowDeploymentVersion.state, - }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.version, versionNum) - ) - ) - .limit(1) - - if (!versionRow?.state) { - return notFoundResponse('Deployment version') - } - - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - - const previousVersionId = 
currentActiveVersion?.id - - const deployedState = versionRow.state as { blocks?: Record } - const blocks = deployedState.blocks - if (!blocks || typeof blocks !== 'object') { - return internalErrorResponse('Invalid deployed state structure') - } - - const workflowData = workflowRecord as Record - - const scheduleValidation = validateWorkflowSchedules(blocks) - if (!scheduleValidation.isValid) { - return badRequestResponse(`Invalid schedule configuration: ${scheduleValidation.error}`) - } - - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ - request, + const result = await performActivateVersion({ workflowId, - workflow: workflowData, + version: versionNum, userId: workflowRecord.userId, - blocks, + workflow: workflowRecord as Record, requestId, - deploymentVersionId: versionRow.id, - previousVersionId, - forceRecreateSubscriptions: true, + request, + actorId: 'admin-api', }) - if (!triggerSaveResult.success) { - logger.error( - `[${requestId}] Admin API: Failed to sync triggers for workflow ${workflowId}`, - triggerSaveResult.error - ) - return internalErrorResponse( - triggerSaveResult.error?.message || 'Failed to sync trigger configuration' - ) - } - - const scheduleResult = await createSchedulesForDeploy(workflowId, blocks, db, versionRow.id) - - if (!scheduleResult.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId: workflowRecord.userId, - previousVersionId, - requestId, - }) - } - return internalErrorResponse(scheduleResult.error || 'Failed to sync schedules') - } - - const result = await activateWorkflowVersion({ workflowId, version: versionNum }) if (!result.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await 
restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId: workflowRecord.userId, - previousVersionId, - requestId, - }) - } - if (result.error === 'Deployment version not found') { - return notFoundResponse('Deployment version') - } + if (result.errorCode === 'not_found') return notFoundResponse('Deployment version') + if (result.errorCode === 'validation') return badRequestResponse(result.error!) return internalErrorResponse(result.error || 'Failed to activate version') } - if (previousVersionId && previousVersionId !== versionRow.id) { - try { - logger.info( - `[${requestId}] Admin API: Cleaning up previous version ${previousVersionId} webhooks/schedules` - ) - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - logger.info(`[${requestId}] Admin API: Previous version cleanup completed`) - } catch (cleanupError) { - logger.error( - `[${requestId}] Admin API: Failed to clean up previous version ${previousVersionId}`, - cleanupError - ) - } - } - - await syncMcpToolsForWorkflow({ - workflowId, - requestId, - state: versionRow.state, - context: 'activate', - }) - logger.info( `[${requestId}] Admin API: Activated version ${versionNum} for workflow ${workflowId}` ) @@ -189,14 +57,12 @@ export const POST = withAdminAuthParams(async (request, context) => success: true, version: versionNum, deployedAt: result.deployedAt!.toISOString(), - warnings: triggerSaveResult.warnings, + warnings: result.warnings, }) } catch (error) { logger.error( `[${requestId}] Admin API: Failed to activate version for workflow ${workflowId}`, - { - error, - } + { error } ) return internalErrorResponse('Failed to activate deployment version') } diff --git a/apps/sim/app/api/workflows/[id]/deploy/route.ts b/apps/sim/app/api/workflows/[id]/deploy/route.ts index fe84eda30c1..c4f6d0087af 100644 --- a/apps/sim/app/api/workflows/[id]/deploy/route.ts +++ 
b/apps/sim/app/api/workflows/[id]/deploy/route.ts @@ -1,26 +1,9 @@ -import { db, workflow, workflowDeploymentVersion } from '@sim/db' +import { db, workflow } from '@sim/db' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { eq } from 'drizzle-orm' import type { NextRequest } from 'next/server' -import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { generateRequestId } from '@/lib/core/utils/request' -import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' -import { - cleanupWebhooksForWorkflow, - restorePreviousVersionWebhooks, - saveTriggerWebhooksForDeploy, -} from '@/lib/webhooks/deploy' -import { - activateWorkflowVersionById, - deployWorkflow, - loadWorkflowFromNormalizedTables, - undeployWorkflow, -} from '@/lib/workflows/persistence/utils' -import { - cleanupDeploymentVersion, - createSchedulesForDeploy, - validateWorkflowSchedules, -} from '@/lib/workflows/schedules' +import { performFullDeploy, performFullUndeploy } from '@/lib/workflows/orchestration' import { validateWorkflowPermissions } from '@/lib/workflows/utils' import { checkNeedsRedeployment, @@ -97,164 +80,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return createErrorResponse('Unable to determine deploying user', 400) } - const normalizedData = await loadWorkflowFromNormalizedTables(id) - if (!normalizedData) { - return createErrorResponse('Failed to load workflow state', 500) - } - - const scheduleValidation = validateWorkflowSchedules(normalizedData.blocks) - if (!scheduleValidation.isValid) { - logger.warn( - `[${requestId}] Schedule validation failed for workflow ${id}: ${scheduleValidation.error}` - ) - return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400) - } - - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - 
and( - eq(workflowDeploymentVersion.workflowId, id), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - const previousVersionId = currentActiveVersion?.id - - const rollbackDeployment = async () => { - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData as Record, - userId: actorUserId, - previousVersionId, - requestId, - }) - const reactivateResult = await activateWorkflowVersionById({ - workflowId: id, - deploymentVersionId: previousVersionId, - }) - if (reactivateResult.success) { - return - } - } - - await undeployWorkflow({ workflowId: id }) - } - - const deployResult = await deployWorkflow({ - workflowId: id, - deployedBy: actorUserId, - workflowName: workflowData!.name, - }) - - if (!deployResult.success) { - return createErrorResponse(deployResult.error || 'Failed to deploy workflow', 500) - } - - const deployedAt = deployResult.deployedAt! - const deploymentVersionId = deployResult.deploymentVersionId - - if (!deploymentVersionId) { - await undeployWorkflow({ workflowId: id }) - return createErrorResponse('Failed to resolve deployment version', 500) - } - - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ - request, + const result = await performFullDeploy({ workflowId: id, - workflow: workflowData, userId: actorUserId, - blocks: normalizedData.blocks, + workflowName: workflowData!.name || undefined, requestId, - deploymentVersionId, - previousVersionId, + request, }) - if (!triggerSaveResult.success) { - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId, - }) - await rollbackDeployment() - return createErrorResponse( - triggerSaveResult.error?.message || 'Failed to save trigger configuration', - triggerSaveResult.error?.status || 500 - ) - } - - let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {} - const scheduleResult = await createSchedulesForDeploy( - id, - 
normalizedData.blocks, - db, - deploymentVersionId - ) - if (!scheduleResult.success) { - logger.error( - `[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}` - ) - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId, - }) - await rollbackDeployment() - return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500) - } - if (scheduleResult.scheduleId) { - scheduleInfo = { - scheduleId: scheduleResult.scheduleId, - cronExpression: scheduleResult.cronExpression, - nextRunAt: scheduleResult.nextRunAt, - } - logger.info( - `[${requestId}] Schedule created for workflow ${id}: ${scheduleResult.scheduleId}` - ) - } - - if (previousVersionId && previousVersionId !== deploymentVersionId) { - try { - logger.info(`[${requestId}] Cleaning up previous version ${previousVersionId} DB records`) - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - } catch (cleanupError) { - logger.error( - `[${requestId}] Failed to clean up previous version ${previousVersionId}`, - cleanupError - ) - // Non-fatal - continue with success response - } + if (!result.success) { + const status = + result.errorCode === 'validation' ? 400 : result.errorCode === 'not_found' ? 
404 : 500 + return createErrorResponse(result.error || 'Failed to deploy workflow', status) } logger.info(`[${requestId}] Workflow deployed successfully: ${id}`) - // Sync MCP tools with the latest parameter schema - await syncMcpToolsForWorkflow({ workflowId: id, requestId, context: 'deploy' }) - - recordAudit({ - workspaceId: workflowData?.workspaceId || null, - actorId: actorUserId, - actorName: session?.user?.name, - actorEmail: session?.user?.email, - action: AuditAction.WORKFLOW_DEPLOYED, - resourceType: AuditResourceType.WORKFLOW, - resourceId: id, - resourceName: workflowData?.name, - description: `Deployed workflow "${workflowData?.name || id}"`, - metadata: { version: deploymentVersionId }, - request, - }) - const responseApiKeyInfo = workflowData!.workspaceId ? 'Workspace API keys' : 'Personal API keys' @@ -262,25 +103,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return createSuccessResponse({ apiKey: responseApiKeyInfo, isDeployed: true, - deployedAt, - schedule: scheduleInfo.scheduleId - ? { - id: scheduleInfo.scheduleId, - cronExpression: scheduleInfo.cronExpression, - nextRunAt: scheduleInfo.nextRunAt, - } - : undefined, - warnings: triggerSaveResult.warnings, - }) - } catch (error: any) { - logger.error(`[${requestId}] Error deploying workflow: ${id}`, { - error: error.message, - stack: error.stack, - name: error.name, - cause: error.cause, - fullError: error, + deployedAt: result.deployedAt, + warnings: result.warnings, }) - return createErrorResponse(error.message || 'Failed to deploy workflow', 500) + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : 'Failed to deploy workflow' + logger.error(`[${requestId}] Error deploying workflow: ${id}`, { error }) + return createErrorResponse(message, 500) } } @@ -328,60 +157,36 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< } export async function DELETE( - request: NextRequest, + _request: NextRequest, { params }: { params: Promise<{ id: string }> } ) { const requestId = generateRequestId() const { id } = await params try { - const { - error, - session, - workflow: workflowData, - } = await validateWorkflowPermissions(id, requestId, 'admin') + const { error, session } = await validateWorkflowPermissions(id, requestId, 'admin') if (error) { return createErrorResponse(error.message, error.status) } - const result = await undeployWorkflow({ workflowId: id }) + const result = await performFullUndeploy({ + workflowId: id, + userId: session!.user.id, + requestId, + }) + if (!result.success) { return createErrorResponse(result.error || 'Failed to undeploy workflow', 500) } - await cleanupWebhooksForWorkflow(id, workflowData as Record, requestId) - - await removeMcpToolsForWorkflow(id, requestId) - - logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`) - - try { - const { PlatformEvents } = await import('@/lib/core/telemetry') - PlatformEvents.workflowUndeployed({ workflowId: id }) - } catch (_e) { - // Silently fail - } - - recordAudit({ - workspaceId: workflowData?.workspaceId || null, - actorId: session!.user.id, - actorName: session?.user?.name, - actorEmail: session?.user?.email, - action: AuditAction.WORKFLOW_UNDEPLOYED, - resourceType: AuditResourceType.WORKFLOW, - resourceId: id, - resourceName: workflowData?.name, - description: `Undeployed workflow "${workflowData?.name || id}"`, - request, - }) - return createSuccessResponse({ isDeployed: false, deployedAt: null, apiKey: null, }) - } catch (error: any) { - logger.error(`[${requestId}] Error undeploying workflow: ${id}`, error) - return 
createErrorResponse(error.message || 'Failed to undeploy workflow', 500) + } catch (error: unknown) { + const message = error instanceof Error ? error.message : 'Failed to undeploy workflow' + logger.error(`[${requestId}] Error undeploying workflow: ${id}`, { error }) + return createErrorResponse(message, 500) } } diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts index 56802840e95..7d4ab62d52c 100644 --- a/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts +++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts @@ -3,19 +3,10 @@ import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { z } from 'zod' -import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { generateRequestId } from '@/lib/core/utils/request' -import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' -import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' -import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' -import { - cleanupDeploymentVersion, - createSchedulesForDeploy, - validateWorkflowSchedules, -} from '@/lib/workflows/schedules' +import { performActivateVersion } from '@/lib/workflows/orchestration' import { validateWorkflowPermissions } from '@/lib/workflows/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' -import type { BlockState } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowDeploymentVersionAPI') @@ -129,140 +120,25 @@ export async function PATCH( return createErrorResponse('Unable to determine activating user', 400) } - const [versionRow] = await db - .select({ - id: workflowDeploymentVersion.id, - state: workflowDeploymentVersion.state, - }) - .from(workflowDeploymentVersion) - .where( - and( - 
eq(workflowDeploymentVersion.workflowId, id), - eq(workflowDeploymentVersion.version, versionNum) - ) - ) - .limit(1) - - if (!versionRow?.state) { - return createErrorResponse('Deployment version not found', 404) - } - - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, id), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - - const previousVersionId = currentActiveVersion?.id - - const deployedState = versionRow.state as { blocks?: Record } - const blocks = deployedState.blocks - if (!blocks || typeof blocks !== 'object') { - return createErrorResponse('Invalid deployed state structure', 500) - } - - const scheduleValidation = validateWorkflowSchedules(blocks) - if (!scheduleValidation.isValid) { - return createErrorResponse( - `Invalid schedule configuration: ${scheduleValidation.error}`, - 400 - ) - } - - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ - request, + const activateResult = await performActivateVersion({ workflowId: id, - workflow: workflowData as Record, + version: versionNum, userId: actorUserId, - blocks, + workflow: workflowData as Record, requestId, - deploymentVersionId: versionRow.id, - previousVersionId, - forceRecreateSubscriptions: true, + request, }) - if (!triggerSaveResult.success) { - return createErrorResponse( - triggerSaveResult.error?.message || 'Failed to sync trigger configuration', - triggerSaveResult.error?.status || 500 - ) - } - - const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id) - - if (!scheduleResult.success) { - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData as Record, - userId: actorUserId, - previousVersionId, - requestId, - 
}) - } - return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500) + if (!activateResult.success) { + const status = + activateResult.errorCode === 'not_found' + ? 404 + : activateResult.errorCode === 'validation' + ? 400 + : 500 + return createErrorResponse(activateResult.error || 'Failed to activate deployment', status) } - const result = await activateWorkflowVersion({ workflowId: id, version: versionNum }) - if (!result.success) { - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData as Record, - userId: actorUserId, - previousVersionId, - requestId, - }) - } - return createErrorResponse(result.error || 'Failed to activate deployment', 400) - } - - if (previousVersionId && previousVersionId !== versionRow.id) { - try { - logger.info( - `[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules` - ) - await cleanupDeploymentVersion({ - workflowId: id, - workflow: workflowData as Record, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - logger.info(`[${requestId}] Previous version cleanup completed`) - } catch (cleanupError) { - logger.error( - `[${requestId}] Failed to clean up previous version ${previousVersionId}`, - cleanupError - ) - } - } - - await syncMcpToolsForWorkflow({ - workflowId: id, - requestId, - state: versionRow.state, - context: 'activate', - }) - - // Apply name/description updates if provided alongside activation let updatedName: string | null | undefined let updatedDescription: string | null | undefined if (name !== undefined || description !== undefined) { @@ -298,23 +174,10 @@ export async function PATCH( } } - recordAudit({ - workspaceId: workflowData?.workspaceId, - actorId: actorUserId, - actorName: session?.user?.name, - actorEmail: 
session?.user?.email, - action: AuditAction.WORKFLOW_DEPLOYMENT_ACTIVATED, - resourceType: AuditResourceType.WORKFLOW, - resourceId: id, - description: `Activated deployment version ${versionNum}`, - metadata: { version: versionNum }, - request, - }) - return createSuccessResponse({ success: true, - deployedAt: result.deployedAt, - warnings: triggerSaveResult.warnings, + deployedAt: activateResult.deployedAt, + warnings: activateResult.warnings, ...(updatedName !== undefined && { name: updatedName }), ...(updatedDescription !== undefined && { description: updatedDescription }), }) diff --git a/apps/sim/app/api/workflows/[id]/route.test.ts b/apps/sim/app/api/workflows/[id]/route.test.ts index 2000e5093ee..383594b5453 100644 --- a/apps/sim/app/api/workflows/[id]/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/route.test.ts @@ -5,14 +5,7 @@ * @vitest-environment node */ -import { - auditMock, - envMock, - loggerMock, - requestUtilsMock, - setupGlobalFetchMock, - telemetryMock, -} from '@sim/testing' +import { auditMock, envMock, loggerMock, requestUtilsMock, telemetryMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -21,7 +14,7 @@ const mockCheckSessionOrInternalAuth = vi.fn() const mockLoadWorkflowFromNormalizedTables = vi.fn() const mockGetWorkflowById = vi.fn() const mockAuthorizeWorkflowByWorkspacePermission = vi.fn() -const mockArchiveWorkflow = vi.fn() +const mockPerformDeleteWorkflow = vi.fn() const mockDbUpdate = vi.fn() const mockDbSelect = vi.fn() @@ -72,8 +65,8 @@ vi.mock('@/lib/workflows/utils', () => ({ }) => mockAuthorizeWorkflowByWorkspacePermission(params), })) -vi.mock('@/lib/workflows/lifecycle', () => ({ - archiveWorkflow: (...args: unknown[]) => mockArchiveWorkflow(...args), +vi.mock('@/lib/workflows/orchestration', () => ({ + performDeleteWorkflow: (...args: unknown[]) => mockPerformDeleteWorkflow(...args), })) vi.mock('@sim/db', () => ({ @@ -294,18 
+287,7 @@ describe('Workflow By ID API Route', () => { workspacePermission: 'admin', }) - mockDbSelect.mockReturnValue({ - from: vi.fn().mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }, { id: 'workflow-456' }]), - }), - }) - - mockArchiveWorkflow.mockResolvedValue({ - archived: true, - workflow: mockWorkflow, - }) - - setupGlobalFetchMock({ ok: true }) + mockPerformDeleteWorkflow.mockResolvedValue({ success: true }) const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', { method: 'DELETE', @@ -317,6 +299,12 @@ describe('Workflow By ID API Route', () => { expect(response.status).toBe(200) const data = await response.json() expect(data.success).toBe(true) + expect(mockPerformDeleteWorkflow).toHaveBeenCalledWith( + expect.objectContaining({ + workflowId: 'workflow-123', + userId: 'user-123', + }) + ) }) it('should allow admin to delete workspace workflow', async () => { @@ -337,19 +325,7 @@ describe('Workflow By ID API Route', () => { workspacePermission: 'admin', }) - // Mock db.select() to return multiple workflows so deletion is allowed - mockDbSelect.mockReturnValue({ - from: vi.fn().mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }, { id: 'workflow-456' }]), - }), - }) - - mockArchiveWorkflow.mockResolvedValue({ - archived: true, - workflow: mockWorkflow, - }) - - setupGlobalFetchMock({ ok: true }) + mockPerformDeleteWorkflow.mockResolvedValue({ success: true }) const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', { method: 'DELETE', @@ -381,11 +357,10 @@ describe('Workflow By ID API Route', () => { workspacePermission: 'admin', }) - // Mock db.select() to return only 1 workflow (the one being deleted) - mockDbSelect.mockReturnValue({ - from: vi.fn().mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]), - }), + mockPerformDeleteWorkflow.mockResolvedValue({ + success: false, + error: 'Cannot delete the only workflow in the 
workspace', + errorCode: 'validation', }) const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', { diff --git a/apps/sim/app/api/workflows/[id]/route.ts b/apps/sim/app/api/workflows/[id]/route.ts index 8b79fe2c287..c746d394db2 100644 --- a/apps/sim/app/api/workflows/[id]/route.ts +++ b/apps/sim/app/api/workflows/[id]/route.ts @@ -1,13 +1,12 @@ import { db } from '@sim/db' -import { templates, workflow } from '@sim/db/schema' +import { workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, ne } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { AuthType, checkHybridAuth, checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import { archiveWorkflow } from '@/lib/workflows/lifecycle' +import { performDeleteWorkflow } from '@/lib/workflows/orchestration' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { authorizeWorkflowByWorkspacePermission, getWorkflowById } from '@/lib/workflows/utils' @@ -184,28 +183,12 @@ export async function DELETE( ) } - // Check if this is the last workflow in the workspace - if (workflowData.workspaceId) { - const totalWorkflowsInWorkspace = await db - .select({ id: workflow.id }) - .from(workflow) - .where(and(eq(workflow.workspaceId, workflowData.workspaceId), isNull(workflow.archivedAt))) - - if (totalWorkflowsInWorkspace.length <= 1) { - return NextResponse.json( - { error: 'Cannot delete the only workflow in the workspace' }, - { status: 400 } - ) - } - } - - // Check if workflow has published templates before deletion const { searchParams } = new URL(request.url) const checkTemplates = searchParams.get('check-templates') === 'true' const deleteTemplatesParam = searchParams.get('deleteTemplates') if (checkTemplates) { - // 
Return template information for frontend to handle + const { templates } = await import('@sim/db/schema') const publishedTemplates = await db .select({ id: templates.id, @@ -229,49 +212,22 @@ export async function DELETE( }) } - // Handle template deletion based on user choice - if (deleteTemplatesParam !== null) { - const deleteTemplates = deleteTemplatesParam === 'delete' - - if (deleteTemplates) { - // Delete all templates associated with this workflow - await db.delete(templates).where(eq(templates.workflowId, workflowId)) - logger.info(`[${requestId}] Deleted templates for workflow ${workflowId}`) - } else { - // Orphan the templates (set workflowId to null) - await db - .update(templates) - .set({ workflowId: null }) - .where(eq(templates.workflowId, workflowId)) - logger.info(`[${requestId}] Orphaned templates for workflow ${workflowId}`) - } - } + const result = await performDeleteWorkflow({ + workflowId, + userId, + requestId, + templateAction: deleteTemplatesParam === 'delete' ? 'delete' : 'orphan', + }) - const archiveResult = await archiveWorkflow(workflowId, { requestId }) - if (!archiveResult.workflow) { - return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) + if (!result.success) { + const status = + result.errorCode === 'not_found' ? 404 : result.errorCode === 'validation' ? 
400 : 500 + return NextResponse.json({ error: result.error }, { status }) } const elapsed = Date.now() - startTime logger.info(`[${requestId}] Successfully archived workflow ${workflowId} in ${elapsed}ms`) - recordAudit({ - workspaceId: workflowData.workspaceId || null, - actorId: userId, - actorName: auth.userName, - actorEmail: auth.userEmail, - action: AuditAction.WORKFLOW_DELETED, - resourceType: AuditResourceType.WORKFLOW, - resourceId: workflowId, - resourceName: workflowData.name, - description: `Archived workflow "${workflowData.name}"`, - metadata: { - archived: archiveResult.archived, - deleteTemplates: deleteTemplatesParam === 'delete', - }, - request, - }) - return NextResponse.json({ success: true }, { status: 200 }) } catch (error: any) { const elapsed = Date.now() - startTime diff --git a/apps/sim/lib/audit/log.ts b/apps/sim/lib/audit/log.ts index 6a45f49664f..f37e27b382a 100644 --- a/apps/sim/lib/audit/log.ts +++ b/apps/sim/lib/audit/log.ts @@ -26,6 +26,11 @@ export const AuditAction = { CHAT_UPDATED: 'chat.updated', CHAT_DELETED: 'chat.deleted', + // Custom Tools + CUSTOM_TOOL_CREATED: 'custom_tool.created', + CUSTOM_TOOL_UPDATED: 'custom_tool.updated', + CUSTOM_TOOL_DELETED: 'custom_tool.deleted', + // Billing CREDIT_PURCHASED: 'credit.purchased', @@ -99,8 +104,10 @@ export const AuditAction = { NOTIFICATION_UPDATED: 'notification.updated', NOTIFICATION_DELETED: 'notification.deleted', - // OAuth + // OAuth / Credentials OAUTH_DISCONNECTED: 'oauth.disconnected', + CREDENTIAL_RENAMED: 'credential.renamed', + CREDENTIAL_DELETED: 'credential.deleted', // Password PASSWORD_RESET: 'password.reset', @@ -124,6 +131,11 @@ export const AuditAction = { PERMISSION_GROUP_MEMBER_ADDED: 'permission_group_member.added', PERMISSION_GROUP_MEMBER_REMOVED: 'permission_group_member.removed', + // Skills + SKILL_CREATED: 'skill.created', + SKILL_UPDATED: 'skill.updated', + SKILL_DELETED: 'skill.deleted', + // Schedules SCHEDULE_UPDATED: 'schedule.updated', @@ -173,6 
+185,7 @@ export const AuditResourceType = { CHAT: 'chat', CONNECTOR: 'connector', CREDENTIAL_SET: 'credential_set', + CUSTOM_TOOL: 'custom_tool', DOCUMENT: 'document', ENVIRONMENT: 'environment', FILE: 'file', @@ -186,6 +199,7 @@ export const AuditResourceType = { PASSWORD: 'password', PERMISSION_GROUP: 'permission_group', SCHEDULE: 'schedule', + SKILL: 'skill', TABLE: 'table', TEMPLATE: 'template', WEBHOOK: 'webhook', diff --git a/apps/sim/lib/copilot/client-sse/content-blocks.ts b/apps/sim/lib/copilot/client-sse/content-blocks.ts deleted file mode 100644 index e7ec0b726a2..00000000000 --- a/apps/sim/lib/copilot/client-sse/content-blocks.ts +++ /dev/null @@ -1,62 +0,0 @@ -import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types' - -/** - * Appends plain text to the active text block, or starts a new one when needed. - */ -export function appendTextBlock(context: ClientStreamingContext, content: string): void { - if (!content) return - - context.accumulatedContent += content - - if (context.currentTextBlock?.type === 'text') { - context.currentTextBlock.content = `${context.currentTextBlock.content || ''}${content}` - return - } - - const block: ClientContentBlock = { - type: 'text', - content, - timestamp: Date.now(), - } - - context.currentTextBlock = block - context.contentBlocks.push(block) -} - -/** - * Starts a new thinking block when the stream enters a reasoning segment. - */ -export function beginThinkingBlock(context: ClientStreamingContext): void { - if (context.currentThinkingBlock) { - context.isInThinkingBlock = true - context.currentTextBlock = null - return - } - - const block: ClientContentBlock = { - type: 'thinking', - content: '', - timestamp: Date.now(), - startTime: Date.now(), - } - - context.currentThinkingBlock = block - context.contentBlocks.push(block) - context.currentTextBlock = null - context.isInThinkingBlock = true -} - -/** - * Closes the active thinking block and records its visible duration. 
- */ -export function finalizeThinkingBlock(context: ClientStreamingContext): void { - if (!context.currentThinkingBlock) { - context.isInThinkingBlock = false - return - } - - const startTime = context.currentThinkingBlock.startTime ?? context.currentThinkingBlock.timestamp - context.currentThinkingBlock.duration = Math.max(0, Date.now() - startTime) - context.currentThinkingBlock = null - context.isInThinkingBlock = false -} diff --git a/apps/sim/lib/copilot/client-sse/handlers.ts b/apps/sim/lib/copilot/client-sse/handlers.ts deleted file mode 100644 index e812311d5f3..00000000000 --- a/apps/sim/lib/copilot/client-sse/handlers.ts +++ /dev/null @@ -1,1115 +0,0 @@ -import { createLogger } from '@sim/logger' -import { - appendTextBlock, - beginThinkingBlock, - finalizeThinkingBlock, -} from '@/lib/copilot/client-sse/content-blocks' -import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants' -import { asRecord } from '@/lib/copilot/orchestrator/sse/utils' -import type { SSEEvent } from '@/lib/copilot/orchestrator/types' -import { - isBackgroundState, - isRejectedState, - isReviewState, - resolveToolDisplay, -} from '@/lib/copilot/store-utils' -import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry' -import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types' -import { useVariablesStore } from '@/stores/panel/variables/store' -import { useEnvironmentStore } from '@/stores/settings/environment/store' -import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' -import { captureBaselineSnapshot } from '@/stores/workflow-diff/utils' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' -import type { WorkflowState } from '@/stores/workflows/workflow/types' -import type { ClientContentBlock, ClientStreamingContext } from './types' - -const logger = createLogger('CopilotClientSseHandlers') -const TEXT_BLOCK_TYPE = 'text' - -const MAX_BATCH_INTERVAL = 50 -const 
MIN_BATCH_INTERVAL = 16 -const MAX_QUEUE_SIZE = 5 - -interface StreamMessage extends Record { - id: string - requestId?: string - content?: string - contentBlocks?: ClientContentBlock[] - error?: string -} - -interface ChatSummary extends Record { - id: string - title?: string -} - -function isStreamMessage(value: Record): value is StreamMessage { - return typeof value.id === 'string' -} - -function getStreamMessages(store: CopilotStore): StreamMessage[] { - return store.messages.filter(isStreamMessage) -} - -function getChatState(store: CopilotStore): { - currentChat?: ChatSummary - chats: ChatSummary[] -} { - const currentChat = asRecord(store.currentChat) - const rawChats = Array.isArray(store.chats) ? store.chats : [] - - return { - currentChat: - typeof currentChat.id === 'string' - ? { ...currentChat, id: currentChat.id, title: currentChat.title as string | undefined } - : undefined, - chats: rawChats - .map((chat) => asRecord(chat)) - .filter( - (chat): chat is Record & { id: string } => typeof chat.id === 'string' - ) - .map((chat) => ({ ...chat, id: chat.id, title: chat.title as string | undefined })), - } -} - -function abortInProgressTools(set: StoreSet, get: () => CopilotStore): void { - const { toolCallsById } = get() - const updatedToolCalls = Object.fromEntries( - Object.entries(toolCallsById).map(([toolCallId, toolCall]) => { - if ( - toolCall.state === ClientToolCallState.executing || - toolCall.state === ClientToolCallState.generating || - toolCall.state === ClientToolCallState.pending - ) { - const state = ClientToolCallState.aborted - return [ - toolCallId, - { - ...toolCall, - state, - display: resolveToolDisplay( - toolCall.name, - state, - toolCall.id, - toolCall.params, - toolCall.serverUI - ), - }, - ] - } - - return [toolCallId, toolCall] - }) - ) - - set({ toolCallsById: updatedToolCalls }) -} - -function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void { - if (typeof window === 'undefined') return - try { - if (!info) { - 
window.sessionStorage.removeItem(STREAM_STORAGE_KEY) - return - } - window.sessionStorage.setItem(STREAM_STORAGE_KEY, JSON.stringify(info)) - } catch (error) { - logger.warn('Failed to write active stream to storage', { - error: error instanceof Error ? error.message : String(error), - }) - } -} - -type StoreSet = ( - partial: Partial | ((state: CopilotStore) => Partial) -) => void - -export type SSEHandler = ( - data: SSEEvent, - context: ClientStreamingContext, - get: () => CopilotStore, - set: StoreSet -) => Promise | void - -const streamingUpdateQueue = new Map() -let streamingUpdateRAF: number | null = null -let lastBatchTime = 0 - -export function stopStreamingUpdates() { - if (streamingUpdateRAF !== null) { - cancelAnimationFrame(streamingUpdateRAF) - streamingUpdateRAF = null - } - streamingUpdateQueue.clear() -} - -function createOptimizedContentBlocks(contentBlocks: ClientContentBlock[]): ClientContentBlock[] { - const result: ClientContentBlock[] = new Array(contentBlocks.length) - for (let i = 0; i < contentBlocks.length; i++) { - const block = contentBlocks[i] - result[i] = { ...block } - } - return result -} - -export function flushStreamingUpdates(set: StoreSet) { - if (streamingUpdateRAF !== null) { - cancelAnimationFrame(streamingUpdateRAF) - streamingUpdateRAF = null - } - if (streamingUpdateQueue.size === 0) return - - const updates = new Map(streamingUpdateQueue) - streamingUpdateQueue.clear() - - set((state: CopilotStore) => { - if (updates.size === 0) return state - return { - messages: getStreamMessages(state).map((msg) => { - const update = updates.get(msg.id) - if (update) { - return { - ...msg, - requestId: update.requestId ?? msg.requestId, - content: '', - contentBlocks: - update.contentBlocks.length > 0 - ? 
createOptimizedContentBlocks(update.contentBlocks) - : [], - } - } - return msg - }), - } - }) -} - -export function updateStreamingMessage(set: StoreSet, context: ClientStreamingContext) { - if (context.suppressStreamingUpdates) return - const now = performance.now() - streamingUpdateQueue.set(context.messageId, context) - const timeSinceLastBatch = now - lastBatchTime - const shouldFlushImmediately = - streamingUpdateQueue.size >= MAX_QUEUE_SIZE || timeSinceLastBatch > MAX_BATCH_INTERVAL - - if (streamingUpdateRAF === null) { - const scheduleUpdate = () => { - streamingUpdateRAF = requestAnimationFrame(() => { - const updates = new Map(streamingUpdateQueue) - streamingUpdateQueue.clear() - streamingUpdateRAF = null - lastBatchTime = performance.now() - set((state: CopilotStore) => { - if (updates.size === 0) return state - const messages = getStreamMessages(state) - const lastMessage = messages[messages.length - 1] - const lastMessageUpdate = lastMessage ? updates.get(lastMessage.id) : null - if (updates.size === 1 && lastMessageUpdate) { - const newMessages = [...messages] - newMessages[messages.length - 1] = { - ...lastMessage, - requestId: lastMessageUpdate.requestId ?? lastMessage.requestId, - content: '', - contentBlocks: - lastMessageUpdate.contentBlocks.length > 0 - ? createOptimizedContentBlocks(lastMessageUpdate.contentBlocks) - : [], - } - return { messages: newMessages } - } - return { - messages: messages.map((msg) => { - const update = updates.get(msg.id) - if (update) { - return { - ...msg, - requestId: update.requestId ?? msg.requestId, - content: '', - contentBlocks: - update.contentBlocks.length > 0 - ? 
createOptimizedContentBlocks(update.contentBlocks) - : [], - } - } - return msg - }), - } - }) - }) - } - if (shouldFlushImmediately) scheduleUpdate() - else setTimeout(scheduleUpdate, Math.max(0, MIN_BATCH_INTERVAL - timeSinceLastBatch)) - } -} - -export function upsertToolCallBlock(context: ClientStreamingContext, toolCall: CopilotToolCall) { - let found = false - for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] - if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) { - context.contentBlocks[i] = { ...b, toolCall } - found = true - break - } - } - if (!found) { - context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() }) - } -} - -function stripThinkingTags(text: string): string { - return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '') -} - -function appendThinkingContent(context: ClientStreamingContext, text: string) { - if (!text) return - const cleanedText = stripThinkingTags(text) - if (!cleanedText) return - if (context.currentThinkingBlock) { - context.currentThinkingBlock.content += cleanedText - } else { - const newBlock: ClientContentBlock = { - type: 'thinking', - content: cleanedText, - timestamp: Date.now(), - startTime: Date.now(), - } - context.currentThinkingBlock = newBlock - context.contentBlocks.push(newBlock) - } - context.isInThinkingBlock = true - context.currentTextBlock = null -} - -function processContentBuffer( - context: ClientStreamingContext, - get: () => CopilotStore, - set: StoreSet -) { - let contentToProcess = context.pendingContent - let hasProcessedContent = false - - const thinkingStartRegex = // - const thinkingEndRegex = /<\/thinking>/ - const designWorkflowStartRegex = // - const designWorkflowEndRegex = /<\/design_workflow>/ - - const splitTrailingPartialTag = ( - text: string, - tags: string[] - ): { text: string; remaining: string } => { - const partialIndex = text.lastIndexOf('<') - if (partialIndex < 0) { - return 
{ text, remaining: '' } - } - const possibleTag = text.substring(partialIndex) - const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag)) - if (!matchesTagStart) { - return { text, remaining: '' } - } - return { - text: text.substring(0, partialIndex), - remaining: possibleTag, - } - } - - while (contentToProcess.length > 0) { - if (context.isInDesignWorkflowBlock) { - const endMatch = designWorkflowEndRegex.exec(contentToProcess) - if (endMatch) { - const designContent = contentToProcess.substring(0, endMatch.index) - context.designWorkflowContent += designContent - context.isInDesignWorkflowBlock = false - - logger.info('[design_workflow] Tag complete, setting plan content', { - contentLength: context.designWorkflowContent.length, - }) - set({ streamingPlanContent: context.designWorkflowContent }) - - contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length) - hasProcessedContent = true - } else { - const { text, remaining } = splitTrailingPartialTag(contentToProcess, [ - '', - ]) - context.designWorkflowContent += text - - set({ streamingPlanContent: context.designWorkflowContent }) - - contentToProcess = remaining - hasProcessedContent = true - if (remaining) { - break - } - } - continue - } - - if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) { - const designStartMatch = designWorkflowStartRegex.exec(contentToProcess) - if (designStartMatch) { - const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index) - if (textBeforeDesign) { - appendTextBlock(context, textBeforeDesign) - hasProcessedContent = true - } - context.isInDesignWorkflowBlock = true - context.designWorkflowContent = '' - contentToProcess = contentToProcess.substring( - designStartMatch.index + designStartMatch[0].length - ) - hasProcessedContent = true - continue - } - - const nextMarkIndex = contentToProcess.indexOf('') - const nextCheckIndex = contentToProcess.indexOf('') - const hasMark = nextMarkIndex >= 0 - const 
hasCheck = nextCheckIndex >= 0 - - const nextTagIndex = - hasMark && hasCheck - ? Math.min(nextMarkIndex, nextCheckIndex) - : hasMark - ? nextMarkIndex - : hasCheck - ? nextCheckIndex - : -1 - - if (nextTagIndex >= 0) { - const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex - const tagStart = isMarkTodo ? '' : '' - const tagEnd = isMarkTodo ? '' : '' - const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length) - - if (closingIndex === -1) { - break - } - - const todoId = contentToProcess - .substring(nextTagIndex + tagStart.length, closingIndex) - .trim() - logger.info( - isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag', - { todoId } - ) - - if (todoId) { - try { - get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed') - logger.info( - isMarkTodo - ? '[TODO] Successfully marked todo in progress' - : '[TODO] Successfully checked off todo', - { todoId } - ) - } catch (e) { - logger.error( - isMarkTodo - ? '[TODO] Failed to mark todo in progress' - : '[TODO] Failed to checkoff todo', - { todoId, error: e } - ) - } - } else { - logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart }) - } - - let beforeTag = contentToProcess.substring(0, nextTagIndex) - let afterTag = contentToProcess.substring(closingIndex + tagEnd.length) - - const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag) - const hadNewlineAfter = /^(\r?\n)+/.test(afterTag) - - beforeTag = beforeTag.replace(/(\r?\n)+$/, '') - afterTag = afterTag.replace(/^(\r?\n)+/, '') - - contentToProcess = beforeTag + (hadNewlineBefore && hadNewlineAfter ? 
'\n' : '') + afterTag - context.currentTextBlock = null - hasProcessedContent = true - continue - } - } - - if (context.isInThinkingBlock) { - const endMatch = thinkingEndRegex.exec(contentToProcess) - if (endMatch) { - const thinkingContent = contentToProcess.substring(0, endMatch.index) - appendThinkingContent(context, thinkingContent) - finalizeThinkingBlock(context) - contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length) - hasProcessedContent = true - } else { - const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['']) - if (text) { - appendThinkingContent(context, text) - hasProcessedContent = true - } - contentToProcess = remaining - if (remaining) { - break - } - } - } else { - const startMatch = thinkingStartRegex.exec(contentToProcess) - if (startMatch) { - const textBeforeThinking = contentToProcess.substring(0, startMatch.index) - if (textBeforeThinking) { - appendTextBlock(context, textBeforeThinking) - hasProcessedContent = true - } - context.isInThinkingBlock = true - context.currentTextBlock = null - contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length) - hasProcessedContent = true - } else { - let partialTagIndex = contentToProcess.lastIndexOf('<') - - const partialMarkTodo = contentToProcess.lastIndexOf(' partialTagIndex) { - partialTagIndex = partialMarkTodo - } - if (partialCheckoffTodo > partialTagIndex) { - partialTagIndex = partialCheckoffTodo - } - - let textToAdd = contentToProcess - let remaining = '' - if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) { - textToAdd = contentToProcess.substring(0, partialTagIndex) - remaining = contentToProcess.substring(partialTagIndex) - } - if (textToAdd) { - appendTextBlock(context, textToAdd) - hasProcessedContent = true - } - contentToProcess = remaining - break - } - } - } - - context.pendingContent = contentToProcess - if (hasProcessedContent) { - updateStreamingMessage(set, context) - } -} - 
-export const sseHandlers: Record = { - chat_id: async (data, context, get, set) => { - context.newChatId = data.chatId - const { currentChat, activeStream } = get() - if (!currentChat && context.newChatId) { - await get().handleNewChatCreation(context.newChatId) - } - if (activeStream && context.newChatId && !activeStream.chatId) { - const updatedStream = { ...activeStream, chatId: context.newChatId } - set({ activeStream: updatedStream }) - writeActiveStreamToStorage(updatedStream) - } - }, - request_id: (data, context) => { - const requestId = typeof data.data === 'string' ? data.data : undefined - if (requestId) { - context.requestId = requestId - } - }, - title_updated: (_data, _context, get, set) => { - const title = _data.title - if (!title) return - const { currentChat, chats } = getChatState(get()) - if (currentChat) { - set({ - currentChat: { ...currentChat, title }, - chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)), - }) - } - }, - tool_result: (data, context, get, set) => { - try { - const eventData = asRecord(data?.data) - const toolCallId: string | undefined = - data?.toolCallId || (eventData.id as string | undefined) - if (!toolCallId) return - const { toolCallsById } = get() - const current = toolCallsById[toolCallId] - if (current) { - if ( - isRejectedState(current.state) || - isReviewState(current.state) || - isBackgroundState(current.state) - ) { - return - } - const targetState = - (data?.state as ClientToolCallState) || - (data?.success ? ClientToolCallState.success : ClientToolCallState.error) - const updatedMap = { ...toolCallsById } - updatedMap[toolCallId] = { - ...current, - state: targetState, - result: { - success: !!data?.success, - output: data?.result ?? undefined, - error: (data?.error as string) ?? 
undefined, - }, - display: resolveToolDisplay( - current.name, - targetState, - current.id, - current.params, - current.serverUI - ), - } - set({ toolCallsById: updatedMap }) - - if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') { - try { - const result = asRecord(data?.result) || asRecord(eventData.result) - const input = asRecord(current.params || current.input) - const todoId = (input.id || input.todoId || result.id || result.todoId) as - | string - | undefined - if (todoId) { - get().updatePlanTodoStatus(todoId, 'completed') - } - } catch (error) { - logger.warn('Failed to process checkoff_todo tool result', { - error: error instanceof Error ? error.message : String(error), - toolCallId, - }) - } - } - - if ( - targetState === ClientToolCallState.success && - current.name === 'mark_todo_in_progress' - ) { - try { - const result = asRecord(data?.result) || asRecord(eventData.result) - const input = asRecord(current.params || current.input) - const todoId = (input.id || input.todoId || result.id || result.todoId) as - | string - | undefined - if (todoId) { - get().updatePlanTodoStatus(todoId, 'executing') - } - } catch (error) { - logger.warn('Failed to process mark_todo_in_progress tool result', { - error: error instanceof Error ? error.message : String(error), - toolCallId, - }) - } - } - - if (current.name === 'edit_workflow') { - try { - const resultPayload = asRecord( - data?.result || eventData.result || eventData.data || data?.data - ) - const input = asRecord(current.params || current.input) - const workflowId = - (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId - - if (!workflowId) { - logger.warn('[SSE] edit_workflow result has no workflowId, skipping diff') - } else { - const baselineWorkflow = captureBaselineSnapshot(workflowId) - - // Re-fetch the state the server just wrote to DB. 
- // Never use the response's workflowState directly — that would - // mean client and server independently track state, creating - // race conditions when the build agent makes sequential calls. - logger.info('[SSE] edit_workflow success, fetching state from DB', { workflowId }) - fetch(`/api/workflows/${workflowId}/state`) - .then((res) => { - if (!res.ok) throw new Error(`State fetch failed: ${res.status}`) - return res.json() - }) - .then((freshState) => { - const diffStore = useWorkflowDiffStore.getState() - return diffStore.setProposedChanges(freshState as WorkflowState, undefined, { - baselineWorkflow, - skipPersist: true, - }) - }) - .catch((err) => { - logger.error('[SSE] Failed to fetch/apply edit_workflow state', { - error: err instanceof Error ? err.message : String(err), - workflowId, - }) - // Fallback: use the response's workflowState if DB fetch failed - if (resultPayload?.workflowState) { - const diffStore = useWorkflowDiffStore.getState() - diffStore - .setProposedChanges(resultPayload.workflowState as WorkflowState, undefined, { - baselineWorkflow, - skipPersist: true, - }) - .catch(() => {}) - } - }) - } - } catch (err) { - logger.error('[SSE] edit_workflow result handling failed', { - error: err instanceof Error ? err.message : String(err), - }) - } - } - - if ( - targetState === ClientToolCallState.success && - (current.name === 'deploy_api' || - current.name === 'deploy_chat' || - current.name === 'deploy_mcp' || - current.name === 'redeploy') - ) { - try { - const resultPayload = asRecord( - data?.result || eventData.result || eventData.data || data?.data - ) - if (typeof resultPayload?.isDeployed === 'boolean') { - const input = asRecord(current.params) - const workflowId = - (resultPayload?.workflowId as string) || - (input?.workflowId as string) || - useWorkflowRegistry.getState().activeWorkflowId - const isDeployed = resultPayload.isDeployed as boolean - const serverDeployedAt = resultPayload.deployedAt - ? 
new Date(resultPayload.deployedAt as string) - : undefined - if (workflowId) { - useWorkflowRegistry - .getState() - .setDeploymentStatus( - workflowId, - isDeployed, - isDeployed ? (serverDeployedAt ?? new Date()) : undefined - ) - logger.info('[SSE] Updated deployment status from tool result', { - toolName: current.name, - workflowId, - isDeployed, - }) - } - } - } catch (err) { - logger.warn('[SSE] Failed to hydrate deployment status', { - error: err instanceof Error ? err.message : String(err), - }) - } - } - - // Environment variables: reload store after successful set - if ( - targetState === ClientToolCallState.success && - current.name === 'set_environment_variables' - ) { - try { - useEnvironmentStore.getState().loadEnvironmentVariables() - logger.info('[SSE] Triggered environment variables reload') - } catch (err) { - logger.warn('[SSE] Failed to reload environment variables', { - error: err instanceof Error ? err.message : String(err), - }) - } - } - - // Workflow variables: reload store after successful set - if ( - targetState === ClientToolCallState.success && - current.name === 'set_global_workflow_variables' - ) { - try { - const input = asRecord(current.params) - const workflowId = - (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId - if (workflowId) { - useVariablesStore.getState().loadForWorkflow(workflowId) - logger.info('[SSE] Triggered workflow variables reload', { workflowId }) - } - } catch (err) { - logger.warn('[SSE] Failed to reload workflow variables', { - error: err instanceof Error ? 
err.message : String(err), - }) - } - } - - // Generate API key: update deployment status with the new key - if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') { - try { - const resultPayload = asRecord( - data?.result || eventData.result || eventData.data || data?.data - ) - const input = asRecord(current.params) - const workflowId = - (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId - const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined - if (workflowId) { - const existingStatus = useWorkflowRegistry - .getState() - .getWorkflowDeploymentStatus(workflowId) - useWorkflowRegistry - .getState() - .setDeploymentStatus( - workflowId, - existingStatus?.isDeployed ?? false, - existingStatus?.deployedAt, - apiKey - ) - logger.info('[SSE] Updated deployment status with API key', { - workflowId, - hasKey: !!apiKey, - }) - } - } catch (err) { - logger.warn('[SSE] Failed to hydrate API key status', { - error: err instanceof Error ? err.message : String(err), - }) - } - } - } - - const blockState = - (data?.state as ClientToolCallState) || - (data?.success ? ClientToolCallState.success : ClientToolCallState.error) - for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] - if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { - if ( - isRejectedState(b.toolCall?.state) || - isReviewState(b.toolCall?.state) || - isBackgroundState(b.toolCall?.state) - ) - break - context.contentBlocks[i] = { - ...b, - toolCall: { - ...b.toolCall, - state: blockState, - display: resolveToolDisplay( - b.toolCall?.name, - blockState, - toolCallId, - b.toolCall?.params, - b.toolCall?.serverUI - ), - }, - } - break - } - } - updateStreamingMessage(set, context) - } catch (error) { - logger.warn('Failed to process tool_result SSE event', { - error: error instanceof Error ? 
error.message : String(error), - }) - } - }, - tool_error: (data, context, get, set) => { - try { - const errorData = asRecord(data?.data) - const toolCallId: string | undefined = - data?.toolCallId || (errorData.id as string | undefined) - if (!toolCallId) return - const targetState = (data?.state as ClientToolCallState) || ClientToolCallState.error - const { toolCallsById } = get() - const current = toolCallsById[toolCallId] - if (current) { - if ( - isRejectedState(current.state) || - isReviewState(current.state) || - isBackgroundState(current.state) - ) { - return - } - const updatedMap = { ...toolCallsById } - updatedMap[toolCallId] = { - ...current, - state: targetState, - display: resolveToolDisplay( - current.name, - targetState, - current.id, - current.params, - current.serverUI - ), - } - set({ toolCallsById: updatedMap }) - } - for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] - if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { - if ( - isRejectedState(b.toolCall?.state) || - isReviewState(b.toolCall?.state) || - isBackgroundState(b.toolCall?.state) - ) - break - context.contentBlocks[i] = { - ...b, - toolCall: { - ...b.toolCall, - state: targetState, - display: resolveToolDisplay( - b.toolCall?.name, - targetState, - toolCallId, - b.toolCall?.params, - b.toolCall?.serverUI - ), - }, - } - break - } - } - updateStreamingMessage(set, context) - } catch (error) { - logger.warn('Failed to process tool_error SSE event', { - error: error instanceof Error ? error.message : String(error), - }) - } - }, - tool_call_delta: (data, context, get, set) => { - const toolCallId = data?.toolCallId - if (!toolCallId) return - - const delta = typeof data?.data === 'string' ? data.data : '' - if (!delta) return - - const { toolCallsById } = get() - const existing = toolCallsById[toolCallId] - if (!existing) return - - const updated: CopilotToolCall = { - ...existing, - streamingArgs: (existing.streamingArgs ?? 
'') + delta, - } - set({ toolCallsById: { ...toolCallsById, [toolCallId]: updated } }) - upsertToolCallBlock(context, updated) - updateStreamingMessage(set, context) - }, - tool_generating: (data, context, get, set) => { - const { toolCallId, toolName } = data - if (!toolCallId || !toolName) return - const { toolCallsById } = get() - - if (!toolCallsById[toolCallId]) { - const tc: CopilotToolCall = { - id: toolCallId, - name: toolName, - state: ClientToolCallState.generating, - display: resolveToolDisplay(toolName, ClientToolCallState.generating, toolCallId), - } - const updated = { ...toolCallsById, [toolCallId]: tc } - set({ toolCallsById: updated }) - logger.info('[toolCallsById] map updated', updated) - - upsertToolCallBlock(context, tc) - updateStreamingMessage(set, context) - } - }, - tool_call: (data, context, get, set) => { - const toolData = asRecord(data?.data) - const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId - const name: string | undefined = (toolData.name as string | undefined) || data?.toolName - if (!id) return - const args = toolData.arguments as Record | undefined - const isPartial = toolData.partial === true - const { toolCallsById } = get() - - const rawUI = (toolData.ui || data?.ui) as Record | undefined - const serverUI = rawUI - ? 
{ - title: rawUI.title as string | undefined, - phaseLabel: rawUI.phaseLabel as string | undefined, - icon: rawUI.icon as string | undefined, - } - : undefined - - const existing = toolCallsById[id] - const toolName = name || existing?.name || 'unknown_tool' - - const clientExecutable = rawUI?.clientExecutable === true - - let initialState: ClientToolCallState - if (isPartial) { - initialState = existing?.state || ClientToolCallState.generating - } else { - initialState = (data?.state as ClientToolCallState) || ClientToolCallState.executing - } - - if ( - existing?.state === ClientToolCallState.executing && - initialState === ClientToolCallState.pending - ) { - initialState = ClientToolCallState.executing - } - - const effectiveServerUI = serverUI || existing?.serverUI - - const next: CopilotToolCall = existing - ? { - ...existing, - name: toolName, - state: initialState, - ...(args ? { params: args } : {}), - ...(effectiveServerUI ? { serverUI: effectiveServerUI } : {}), - ...(clientExecutable ? { clientExecutable: true } : {}), - ...(!isPartial ? { streamingArgs: undefined } : {}), - display: resolveToolDisplay( - toolName, - initialState, - id, - args || existing.params, - effectiveServerUI - ), - } - : { - id, - name: toolName, - state: initialState, - ...(args ? { params: args } : {}), - ...(serverUI ? { serverUI } : {}), - ...(clientExecutable ? 
{ clientExecutable: true } : {}), - display: resolveToolDisplay(toolName, initialState, id, args, serverUI), - } - const updated = { ...toolCallsById, [id]: next } - set({ toolCallsById: updated }) - logger.info(`[toolCallsById] → ${initialState}`, { id, name: toolName, params: args }) - - upsertToolCallBlock(context, next) - updateStreamingMessage(set, context) - - if (isPartial) { - return - } - - if (toolName === 'oauth_request_access' && args && typeof window !== 'undefined') { - try { - window.dispatchEvent( - new CustomEvent('open-oauth-connect', { - detail: { - providerName: (args.providerName || args.provider_name || '') as string, - serviceId: (args.serviceId || args.service_id || '') as string, - providerId: (args.providerId || args.provider_id || '') as string, - requiredScopes: (args.requiredScopes || args.required_scopes || []) as string[], - newScopes: (args.newScopes || args.new_scopes || []) as string[], - }, - }) - ) - logger.info('[SSE] Dispatched OAuth connect event', { - providerId: args.providerId || args.provider_id, - providerName: args.providerName || args.provider_name, - }) - } catch (err) { - logger.warn('[SSE] Failed to dispatch OAuth connect event', { - error: err instanceof Error ? err.message : String(err), - }) - } - } - - return - }, - reasoning: (data, context, _get, set) => { - const phase = (data && (data.phase || data?.data?.phase)) as string | undefined - if (phase === 'start') { - beginThinkingBlock(context) - updateStreamingMessage(set, context) - return - } - if (phase === 'end') { - finalizeThinkingBlock(context) - updateStreamingMessage(set, context) - return - } - const chunk: string = typeof data?.data === 'string' ? 
data.data : data?.content || '' - if (!chunk) return - appendThinkingContent(context, chunk) - updateStreamingMessage(set, context) - }, - context_compaction_start: (_data, context, get, set) => { - const id = `compaction_${Date.now()}` - context.activeCompactionId = id - const toolName = 'context_compaction' - const state = ClientToolCallState.executing - const tc: CopilotToolCall = { - id, - name: toolName, - state, - params: {}, - display: resolveToolDisplay(toolName, state, id, {}), - } - const { toolCallsById } = get() - set({ toolCallsById: { ...toolCallsById, [id]: tc } }) - upsertToolCallBlock(context, tc) - updateStreamingMessage(set, context) - }, - context_compaction: (data, context, get, set) => { - const eventData = asRecord(data?.data) - const summaryChars = (eventData.summary_chars as number) || 0 - const id = context.activeCompactionId || `compaction_${Date.now()}` - context.activeCompactionId = undefined - const toolName = 'context_compaction' - const state = ClientToolCallState.success - const params = { summary_chars: summaryChars } - const tc: CopilotToolCall = { - id, - name: toolName, - state, - params, - display: resolveToolDisplay(toolName, state, id, params), - } - const { toolCallsById } = get() - set({ toolCallsById: { ...toolCallsById, [id]: tc } }) - upsertToolCallBlock(context, tc) - updateStreamingMessage(set, context) - }, - content: (data, context, get, set) => { - if (typeof data.data !== 'string') return - context.pendingContent += data.data - processContentBuffer(context, get, set) - }, - done: (_data, context) => { - logger.info('[SSE] DONE EVENT RECEIVED', { - doneEventCount: context.doneEventCount, - data: _data, - }) - context.doneEventCount++ - if (context.doneEventCount >= 1) { - logger.info('[SSE] Setting streamComplete = true, stream will terminate') - context.streamComplete = true - } - }, - error: (data, context, _get, set) => { - logger.error('Stream error:', data.error) - set((state: CopilotStore) => ({ - messages: 
getStreamMessages(state).map((msg) => - msg.id === context.messageId - ? { - ...msg, - content: context.accumulatedContent || 'An error occurred.', - error: data.error, - } - : msg - ), - })) - context.streamComplete = true - }, - stream_end: (_data, context, get, set) => { - if (context.pendingContent) { - if (context.isInThinkingBlock && context.currentThinkingBlock) { - appendThinkingContent(context, context.pendingContent) - } else if (context.pendingContent.trim()) { - appendTextBlock(context, context.pendingContent) - } - context.pendingContent = '' - } - finalizeThinkingBlock(context) - updateStreamingMessage(set, context) - abortInProgressTools(set, get) - }, - default: () => {}, -} diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/access.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/access.ts index 7d62753209d..b989ea34396 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/access.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/access.ts @@ -2,13 +2,14 @@ import { db } from '@sim/db' import { permissions, workspace } from '@sim/db/schema' import { and, desc, eq, isNull } from 'drizzle-orm' import { authorizeWorkflowByWorkspacePermission, type getWorkflowById } from '@/lib/workflows/utils' -import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils' +import { checkWorkspaceAccess, getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' type WorkflowRecord = NonNullable>> export async function ensureWorkflowAccess( workflowId: string, - userId: string + userId: string, + action: 'read' | 'write' | 'admin' = 'read' ): Promise<{ workflow: WorkflowRecord workspaceId?: string | null @@ -16,7 +17,7 @@ export async function ensureWorkflowAccess( const result = await authorizeWorkflowByWorkspacePermission({ workflowId, userId, - action: 'read', + action, }) if (!result.workflow) { @@ -56,25 +57,25 @@ export async function getDefaultWorkspaceId(userId: string): Promise { export async function 
ensureWorkspaceAccess( workspaceId: string, userId: string, - requireWrite: boolean + level: 'read' | 'write' | 'admin' = 'read' ): Promise { const access = await checkWorkspaceAccess(workspaceId, userId) if (!access.exists || !access.hasAccess) { throw new Error(`Workspace ${workspaceId} not found`) } - const permissionType = access.canWrite - ? 'write' - : access.workspace?.ownerId === userId - ? 'admin' - : 'read' - const canWrite = permissionType === 'admin' || permissionType === 'write' + if (level === 'read') return - if (requireWrite && !canWrite) { - throw new Error('Write or admin access required for this workspace') + if (level === 'admin') { + if (access.workspace?.ownerId === userId) return + const perm = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (perm !== 'admin') { + throw new Error('Admin access required for this workspace') + } + return } - if (!requireWrite && !canWrite && permissionType !== 'read') { - throw new Error('Access denied to workspace') + if (!access.canWrite) { + throw new Error('Write or admin access required for this workspace') } } diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/deploy.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/deploy.ts index 1d830cdfd34..062bedaceb6 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/deploy.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/deploy.ts @@ -2,15 +2,18 @@ import crypto from 'crypto' import { db } from '@sim/db' import { chat, workflowMcpTool } from '@sim/db/schema' import { and, eq, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types' import { getBaseUrl } from '@/lib/core/utils/urls' import { mcpPubSub } from '@/lib/mcp/pubsub' -import { - generateParameterSchemaForWorkflow, - removeMcpToolsForWorkflow, -} from 
'@/lib/mcp/workflow-mcp-sync' +import { generateParameterSchemaForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema' -import { deployWorkflow, undeployWorkflow } from '@/lib/workflows/persistence/utils' +import { + performChatDeploy, + performChatUndeploy, + performFullDeploy, + performFullUndeploy, +} from '@/lib/workflows/orchestration' import { checkChatAccess, checkWorkflowAccessForChatCreation } from '@/app/api/chat/utils' import { ensureWorkflowAccess } from '../access' import type { DeployApiParams, DeployChatParams, DeployMcpParams } from '../param-types' @@ -25,20 +28,23 @@ export async function executeDeployApi( return { success: false, error: 'workflowId is required' } } const action = params.action === 'undeploy' ? 'undeploy' : 'deploy' - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'admin' + ) if (action === 'undeploy') { - const result = await undeployWorkflow({ workflowId }) + const result = await performFullUndeploy({ workflowId, userId: context.userId }) if (!result.success) { return { success: false, error: result.error || 'Failed to undeploy workflow' } } - await removeMcpToolsForWorkflow(workflowId, crypto.randomUUID().slice(0, 8)) return { success: true, output: { workflowId, isDeployed: false } } } - const result = await deployWorkflow({ + const result = await performFullDeploy({ workflowId, - deployedBy: context.userId, + userId: context.userId, workflowName: workflowRecord.name || undefined, }) if (!result.success) { @@ -82,11 +88,21 @@ export async function executeDeployChat( if (!existing.length) { return { success: false, error: 'No active chat deployment found for this workflow' } } - const { hasAccess } = await checkChatAccess(existing[0].id, context.userId) + const { hasAccess, workspaceId: chatWorkspaceId } = await 
checkChatAccess( + existing[0].id, + context.userId + ) if (!hasAccess) { return { success: false, error: 'Unauthorized chat access' } } - await db.delete(chat).where(eq(chat.id, existing[0].id)) + const undeployResult = await performChatUndeploy({ + chatId: existing[0].id, + userId: context.userId, + workspaceId: chatWorkspaceId, + }) + if (!undeployResult.success) { + return { success: false, error: undeployResult.error || 'Failed to undeploy chat' } + } return { success: true, output: { @@ -99,17 +115,19 @@ export async function executeDeployChat( } } - const { hasAccess } = await checkWorkflowAccessForChatCreation(workflowId, context.userId) - if (!hasAccess) { + const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForChatCreation( + workflowId, + context.userId + ) + if (!hasAccess || !workflowRecord) { return { success: false, error: 'Workflow not found or access denied' } } - const existing = await db + const [existingDeployment] = await db .select() .from(chat) .where(and(eq(chat.workflowId, workflowId), isNull(chat.archivedAt))) .limit(1) - const existingDeployment = existing[0] || null const identifier = String(params.identifier || existingDeployment?.identifier || '').trim() const title = String(params.title || existingDeployment?.title || '').trim() @@ -134,21 +152,14 @@ export async function executeDeployChat( return { success: false, error: 'Identifier already in use' } } - const deployResult = await deployWorkflow({ - workflowId, - deployedBy: context.userId, - }) - if (!deployResult.success) { - return { success: false, error: deployResult.error || 'Failed to deploy workflow' } - } - const existingCustomizations = (existingDeployment?.customizations as | { primaryColor?: string; welcomeMessage?: string } | undefined) || {} - const payload = { + const result = await performChatDeploy({ workflowId, + userId: context.userId, identifier, title, description: String(params.description || existingDeployment?.description || ''), @@ 
-162,46 +173,22 @@ export async function executeDeployChat( existingCustomizations.welcomeMessage || 'Hi there! How can I help you today?', }, - authType: params.authType || existingDeployment?.authType || 'public', + authType: (params.authType || existingDeployment?.authType || 'public') as + | 'public' + | 'password' + | 'email' + | 'sso', password: params.password, - allowedEmails: params.allowedEmails || existingDeployment?.allowedEmails || [], - outputConfigs: params.outputConfigs || existingDeployment?.outputConfigs || [], - } + allowedEmails: params.allowedEmails || (existingDeployment?.allowedEmails as string[]) || [], + outputConfigs: (params.outputConfigs || existingDeployment?.outputConfigs || []) as Array<{ + blockId: string + path: string + }>, + workspaceId: workflowRecord.workspaceId, + }) - if (existingDeployment) { - await db - .update(chat) - .set({ - identifier: payload.identifier, - title: payload.title, - description: payload.description, - customizations: payload.customizations, - authType: payload.authType, - password: payload.password || existingDeployment.password, - allowedEmails: - payload.authType === 'email' || payload.authType === 'sso' ? payload.allowedEmails : [], - outputConfigs: payload.outputConfigs, - updatedAt: new Date(), - }) - .where(eq(chat.id, existingDeployment.id)) - } else { - await db.insert(chat).values({ - id: crypto.randomUUID(), - workflowId, - userId: context.userId, - identifier: payload.identifier, - title: payload.title, - description: payload.description, - customizations: payload.customizations, - isActive: true, - authType: payload.authType, - password: payload.password || null, - allowedEmails: - payload.authType === 'email' || payload.authType === 'sso' ? 
payload.allowedEmails : [], - outputConfigs: payload.outputConfigs, - createdAt: new Date(), - updatedAt: new Date(), - }) + if (!result.success) { + return { success: false, error: result.error || 'Failed to deploy chat' } } const baseUrl = getBaseUrl() @@ -214,7 +201,7 @@ export async function executeDeployChat( isDeployed: true, isChatDeployed: true, identifier, - chatUrl: `${baseUrl}/chat/${identifier}`, + chatUrl: result.chatUrl, apiEndpoint: `${baseUrl}/api/workflows/${workflowId}/run`, baseUrl, }, @@ -234,7 +221,11 @@ export async function executeDeployMcp( return { success: false, error: 'workflowId is required' } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'admin' + ) const workspaceId = workflowRecord.workspaceId if (!workspaceId) { return { success: false, error: 'workspaceId is required' } @@ -263,8 +254,15 @@ export async function executeDeployMcp( mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId }) - // Intentionally omits `isDeployed` — removing from an MCP server does not - // affect the workflow's API deployment. 
+ recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_REMOVED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: serverId, + description: `Undeployed workflow "${workflowId}" from MCP server`, + }) + return { success: true, output: { workflowId, serverId, action: 'undeploy', removed: true }, @@ -319,6 +317,15 @@ export async function executeDeployMcp( mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_UPDATED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: serverId, + description: `Updated MCP tool "${toolName}" on server`, + }) + return { success: true, output: { toolId, toolName, toolDescription, updated: true, mcpServerUrl, baseUrl }, @@ -339,6 +346,15 @@ export async function executeDeployMcp( mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_ADDED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: serverId, + description: `Deployed workflow as MCP tool "${toolName}"`, + }) + return { success: true, output: { toolId, toolName, toolDescription, updated: false, mcpServerUrl, baseUrl }, @@ -357,9 +373,9 @@ export async function executeRedeploy( if (!workflowId) { return { success: false, error: 'workflowId is required' } } - await ensureWorkflowAccess(workflowId, context.userId) + await ensureWorkflowAccess(workflowId, context.userId, 'admin') - const result = await deployWorkflow({ workflowId, deployedBy: context.userId }) + const result = await performFullDeploy({ workflowId, userId: context.userId }) if (!result.success) { return { success: false, error: result.error || 'Failed to redeploy workflow' } } diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts index 
6669867b05a..ecc2456d544 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts @@ -8,12 +8,13 @@ import { workflowMcpTool, } from '@sim/db/schema' import { and, eq, inArray, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types' import { mcpPubSub } from '@/lib/mcp/pubsub' import { generateParameterSchemaForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema' import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server' -import { ensureWorkflowAccess } from '../access' +import { ensureWorkflowAccess, ensureWorkspaceAccess } from '../access' import type { CheckDeploymentStatusParams, CreateWorkspaceMcpServerParams, @@ -182,7 +183,11 @@ export async function executeCreateWorkspaceMcpServer( if (!workflowId) { return { success: false, error: 'workflowId is required' } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) const workspaceId = workflowRecord.workspaceId if (!workspaceId) { return { success: false, error: 'workspaceId is required' } @@ -242,6 +247,16 @@ export async function executeCreateWorkspaceMcpServer( mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId }) } + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_ADDED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: serverId, + resourceName: name, + description: `Created MCP server "${name}"`, + }) + return { success: true, output: { server, addedTools } } } catch (error) { return { success: false, error: error instanceof Error ? 
error.message : String(error) } @@ -277,7 +292,10 @@ export async function executeUpdateWorkspaceMcpServer( } const [existing] = await db - .select({ id: workflowMcpServer.id, createdBy: workflowMcpServer.createdBy }) + .select({ + id: workflowMcpServer.id, + workspaceId: workflowMcpServer.workspaceId, + }) .from(workflowMcpServer) .where(eq(workflowMcpServer.id, serverId)) .limit(1) @@ -286,8 +304,18 @@ export async function executeUpdateWorkspaceMcpServer( return { success: false, error: 'MCP server not found' } } + await ensureWorkspaceAccess(existing.workspaceId, context.userId, 'write') + await db.update(workflowMcpServer).set(updates).where(eq(workflowMcpServer.id, serverId)) + recordAudit({ + actorId: context.userId, + action: AuditAction.MCP_SERVER_UPDATED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: params.serverId, + description: `Updated MCP server`, + }) + return { success: true, output: { serverId, ...updates, updatedAt: undefined } } } catch (error) { return { success: false, error: error instanceof Error ? error.message : String(error) } @@ -318,10 +346,20 @@ export async function executeDeleteWorkspaceMcpServer( return { success: false, error: 'MCP server not found' } } + await ensureWorkspaceAccess(existing.workspaceId, context.userId, 'admin') + await db.delete(workflowMcpServer).where(eq(workflowMcpServer.id, serverId)) mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId: existing.workspaceId }) + recordAudit({ + actorId: context.userId, + action: AuditAction.MCP_SERVER_REMOVED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: params.serverId, + description: `Deleted MCP server`, + }) + return { success: true, output: { serverId, name: existing.name, deleted: true } } } catch (error) { return { success: false, error: error instanceof Error ? 
error.message : String(error) } @@ -379,7 +417,7 @@ export async function executeRevertToVersion( return { success: false, error: 'version is required' } } - await ensureWorkflowAccess(workflowId, context.userId) + await ensureWorkflowAccess(workflowId, context.userId, 'admin') const baseUrl = process.env.NEXT_PUBLIC_APP_URL || process.env.APP_URL || 'http://localhost:3000' diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts index e824f25a064..95252daf8bf 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts @@ -2,6 +2,7 @@ import { db } from '@sim/db' import { credential, mcpServers, pendingCredentialDraft, user } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, lt } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { SIM_AGENT_API_URL } from '@/lib/copilot/constants' import type { ExecutionContext, @@ -231,6 +232,16 @@ async function executeManageCustomTool( }) const created = resultTools.find((tool) => tool.title === title) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.CUSTOM_TOOL_CREATED, + resourceType: AuditResourceType.CUSTOM_TOOL, + resourceId: created?.id, + resourceName: title, + description: `Created custom tool "${title}"`, + }) + return { success: true, output: { @@ -279,6 +290,16 @@ async function executeManageCustomTool( userId: context.userId, }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.CUSTOM_TOOL_UPDATED, + resourceType: AuditResourceType.CUSTOM_TOOL, + resourceId: params.toolId, + resourceName: title, + description: `Updated custom tool "${title}"`, + }) + return { success: true, output: { @@ -305,6 +326,15 @@ async function executeManageCustomTool( return { success: false, error: `Custom tool not found: ${params.toolId}` } } 
+ recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.CUSTOM_TOOL_DELETED, + resourceType: AuditResourceType.CUSTOM_TOOL, + resourceId: params.toolId, + description: 'Deleted custom tool', + }) + return { success: true, output: { @@ -461,6 +491,18 @@ async function executeManageMcpTool( await mcpService.clearCache(workspaceId) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_ADDED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: serverId, + resourceName: config.name, + description: existing + ? `Updated existing MCP server "${config.name}"` + : `Added MCP server "${config.name}"`, + }) + return { success: true, output: { @@ -514,6 +556,15 @@ async function executeManageMcpTool( await mcpService.clearCache(workspaceId) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_UPDATED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: params.serverId, + description: `Updated MCP server "${updated.name}"`, + }) + return { success: true, output: { @@ -527,6 +578,13 @@ async function executeManageMcpTool( } if (operation === 'delete') { + if (context.userPermission && context.userPermission !== 'admin') { + return { + success: false, + error: `Permission denied: 'delete' on manage_mcp_tool requires admin access. 
You have '${context.userPermission}' permission.`, + } + } + if (!params.serverId) { return { success: false, error: "'serverId' is required for 'delete'" } } @@ -542,6 +600,15 @@ async function executeManageMcpTool( await mcpService.clearCache(workspaceId) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.MCP_SERVER_REMOVED, + resourceType: AuditResourceType.MCP_SERVER, + resourceId: params.serverId, + description: `Deleted MCP server "${deleted.name}"`, + }) + return { success: true, output: { @@ -643,6 +710,16 @@ async function executeManageSkill( }) const created = resultSkills.find((s) => s.name === params.name) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.SKILL_CREATED, + resourceType: AuditResourceType.SKILL, + resourceId: created?.id, + resourceName: params.name, + description: `Created skill "${params.name}"`, + }) + return { success: true, output: { @@ -685,14 +762,26 @@ async function executeManageSkill( userId: context.userId, }) + const updatedName = params.name || found.name + + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.SKILL_UPDATED, + resourceType: AuditResourceType.SKILL, + resourceId: params.skillId, + resourceName: updatedName, + description: `Updated skill "${updatedName}"`, + }) + return { success: true, output: { success: true, operation, skillId: params.skillId, - name: params.name || found.name, - message: `Updated skill "${params.name || found.name}"`, + name: updatedName, + message: `Updated skill "${updatedName}"`, }, } } @@ -707,6 +796,15 @@ async function executeManageSkill( return { success: false, error: `Skill not found: ${params.skillId}` } } + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.SKILL_DELETED, + resourceType: AuditResourceType.SKILL, + resourceId: params.skillId, + description: 'Deleted skill', + }) + return { success: true, output: { @@ -951,10 +1049,24 @@ const SIM_WORKFLOW_TOOL_HANDLERS: 
Record< .update(credential) .set({ displayName, updatedAt: new Date() }) .where(eq(credential.id, credentialId)) + recordAudit({ + actorId: c.userId, + action: AuditAction.CREDENTIAL_RENAMED, + resourceType: AuditResourceType.OAUTH, + resourceId: credentialId, + description: `Renamed credential to "${displayName}"`, + }) return { success: true, output: { credentialId, displayName } } } case 'delete': { await db.delete(credential).where(eq(credential.id, credentialId)) + recordAudit({ + actorId: c.userId, + action: AuditAction.CREDENTIAL_DELETED, + resourceType: AuditResourceType.OAUTH, + resourceId: credentialId, + description: `Deleted credential`, + }) return { success: true, output: { credentialId, deleted: true } } } default: diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/job-tools.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/job-tools.ts index f9c48ab5beb..6870bde0d50 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/job-tools.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/job-tools.ts @@ -3,6 +3,7 @@ import { copilotChats, workflowSchedule } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull } from 'drizzle-orm' import { v4 as uuidv4 } from 'uuid' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types' import { parseCronToHumanReadable, validateCronExpression } from '@/lib/workflows/schedules/utils' @@ -150,6 +151,17 @@ export async function executeCreateJob( logger.info('Job created', { jobId, cronExpression, nextRunAt: nextRunAt.toISOString() }) + recordAudit({ + workspaceId: context.workspaceId || null, + actorId: context.userId, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: jobId, + resourceName: title || undefined, + description: `Created job "${title || jobId}"`, + metadata: { operation: 'create', cronExpression 
}, + }) + return { success: true, output: { @@ -381,6 +393,19 @@ export async function executeManageJob( logger.info('Job updated', { jobId: args.jobId, fields: Object.keys(updates) }) + recordAudit({ + workspaceId: context.workspaceId || null, + actorId: context.userId, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: args.jobId, + description: `Updated job`, + metadata: { + operation: 'update', + fields: Object.keys(updates).filter((k) => k !== 'updatedAt'), + }, + }) + return { success: true, output: { @@ -419,6 +444,16 @@ export async function executeManageJob( logger.info('Job deleted', { jobId: args.jobId }) + recordAudit({ + workspaceId: context.workspaceId || null, + actorId: context.userId, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: args.jobId, + description: `Deleted job`, + metadata: { operation: 'delete' }, + }) + return { success: true, output: { @@ -492,6 +527,16 @@ export async function executeCompleteJob( logger.info('Job completed', { jobId }) + recordAudit({ + workspaceId: context.workspaceId || null, + actorId: context.userId, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: jobId, + description: `Completed job`, + metadata: { operation: 'complete' }, + }) + return { success: true, output: { jobId, message: 'Job marked as completed. No further executions will occur.' 
}, diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/materialize-file.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/materialize-file.ts index 5428e4dff8a..2fce7e695ad 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/materialize-file.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/materialize-file.ts @@ -2,6 +2,7 @@ import { db } from '@sim/db' import { workflow, workspaceFiles } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { findMothershipUploadRowByChatAndName } from '@/lib/copilot/orchestrator/tool-executor/upload-file-reader' import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types' import { getServePathPrefix } from '@/lib/uploads' @@ -158,6 +159,17 @@ async function executeImport( chatId, }) + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.WORKFLOW_CREATED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + resourceName: dedupedName, + description: `Imported workflow "${dedupedName}" from file`, + metadata: { fileName, source: 'copilot-import' }, + }) + return { success: true, output: { diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts index ad43e8f5ab2..c30d6b3e578 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts @@ -1,6 +1,7 @@ import crypto from 'crypto' import { createLogger } from '@sim/logger' import { createWorkspaceApiKey } from '@/lib/api-key/auth' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types' import { generateRequestId } from 
'@/lib/core/utils/request' import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow' @@ -8,13 +9,13 @@ import { getExecutionState, getLatestExecutionState, } from '@/lib/workflows/executor/execution-state' +import { performDeleteFolder, performDeleteWorkflow } from '@/lib/workflows/orchestration' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer' import { + checkForCircularReference, createFolderRecord, createWorkflowRecord, - deleteFolderRecord, - deleteWorkflowRecord, listFolders, setWorkflowVariables, updateFolderRecord, @@ -121,7 +122,7 @@ export async function executeCreateWorkflow( params?.workspaceId || context.workspaceId || (await getDefaultWorkspaceId(context.userId)) const folderId = params?.folderId || null - await ensureWorkspaceAccess(workspaceId, context.userId, true) + await ensureWorkspaceAccess(workspaceId, context.userId, 'write') assertWorkflowMutationNotAborted(context) const result = await createWorkflowRecord({ @@ -132,6 +133,28 @@ export async function executeCreateWorkflow( folderId, }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.WORKFLOW_CREATED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: result.workflowId, + resourceName: name, + description: `Created workflow "${name}"`, + }) + + try { + const { PlatformEvents } = await import('@/lib/core/telemetry') + PlatformEvents.workflowCreated({ + workflowId: result.workflowId, + name, + workspaceId, + folderId: folderId ?? 
undefined, + }) + } catch (_e) { + // Telemetry is best-effort + } + const normalized = await loadWorkflowFromNormalizedTables(result.workflowId) let copilotSanitizedWorkflowState: unknown if (normalized) { @@ -175,7 +198,7 @@ export async function executeCreateFolder( params?.workspaceId || context.workspaceId || (await getDefaultWorkspaceId(context.userId)) const parentId = params?.parentId || null - await ensureWorkspaceAccess(workspaceId, context.userId, true) + await ensureWorkspaceAccess(workspaceId, context.userId, 'write') assertWorkflowMutationNotAborted(context) const result = await createFolderRecord({ @@ -185,6 +208,16 @@ export async function executeCreateFolder( parentId, }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.FOLDER_CREATED, + resourceType: AuditResourceType.FOLDER, + resourceId: result.folderId, + resourceName: name, + description: `Created folder "${name}"`, + }) + return { success: true, output: result } } catch (error) { return { success: false, error: error instanceof Error ? error.message : String(error) } @@ -201,7 +234,11 @@ export async function executeRunWorkflow( return { success: false, error: 'workflowId is required' } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) const useDraftState = !params.useDeployedState @@ -236,7 +273,11 @@ export async function executeSetGlobalWorkflowVariables( const operations: VariableOperation[] = Array.isArray(params.operations) ? 
params.operations : [] - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) interface WorkflowVariable { id: string @@ -325,6 +366,14 @@ export async function executeSetGlobalWorkflowVariables( assertWorkflowMutationNotAborted(context) await setWorkflowVariables(workflowId, nextVarsRecord) + recordAudit({ + actorId: context.userId, + action: AuditAction.WORKFLOW_VARIABLES_UPDATED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + description: `Updated workflow variables`, + }) + return { success: true, output: { updated: Object.values(byName).length } } } catch (error) { return { success: false, error: error instanceof Error ? error.message : String(error) } @@ -348,7 +397,7 @@ export async function executeRenameWorkflow( return { success: false, error: 'Workflow name must be 200 characters or less' } } - await ensureWorkflowAccess(workflowId, context.userId) + await ensureWorkflowAccess(workflowId, context.userId, 'write') assertWorkflowMutationNotAborted(context) await updateWorkflowRecord(workflowId, { name }) @@ -368,7 +417,7 @@ export async function executeMoveWorkflow( return { success: false, error: 'workflowId is required' } } - await ensureWorkflowAccess(workflowId, context.userId) + await ensureWorkflowAccess(workflowId, context.userId, 'write') const folderId = params.folderId || null assertWorkflowMutationNotAborted(context) await updateWorkflowRecord(workflowId, { folderId }) @@ -395,6 +444,15 @@ export async function executeMoveFolder( return { success: false, error: 'A folder cannot be moved into itself' } } + if (parentId) { + const wouldCreateCycle = await checkForCircularReference(folderId, parentId) + if (wouldCreateCycle) { + return { success: false, error: 'Cannot create circular folder reference' } + } + } + + const workspaceId = context.workspaceId || (await 
getDefaultWorkspaceId(context.userId)) + await ensureWorkspaceAccess(workspaceId, context.userId, 'write') assertWorkflowMutationNotAborted(context) await updateFolderRecord(folderId, { parentId }) @@ -417,7 +475,11 @@ export async function executeRunWorkflowUntilBlock( return { success: false, error: 'stopAfterBlockId is required' } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) const useDraftState = !params.useDeployedState @@ -460,7 +522,7 @@ export async function executeGenerateApiKey( const workspaceId = params.workspaceId || context.workspaceId || (await getDefaultWorkspaceId(context.userId)) - await ensureWorkspaceAccess(workspaceId, context.userId, true) + await ensureWorkspaceAccess(workspaceId, context.userId, 'admin') assertWorkflowMutationNotAborted(context) const newKey = await createWorkspaceApiKey({ @@ -469,6 +531,14 @@ export async function executeGenerateApiKey( name, }) + recordAudit({ + workspaceId, + actorId: context.userId, + action: AuditAction.API_KEY_CREATED, + resourceType: AuditResourceType.API_KEY, + description: `Generated API key for workspace`, + }) + return { success: true, output: { @@ -511,7 +581,11 @@ export async function executeRunFromBlock( } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) const useDraftState = !params.useDeployedState const result = await executeWorkflow( @@ -569,7 +643,7 @@ export async function executeUpdateWorkflow( return { success: false, error: 'At least one of name or description is required' } } - await ensureWorkflowAccess(workflowId, context.userId) + await ensureWorkflowAccess(workflowId, context.userId, 'write') assertWorkflowMutationNotAborted(context) await 
updateWorkflowRecord(workflowId, updates) @@ -592,9 +666,17 @@ export async function executeDeleteWorkflow( return { success: false, error: 'workflowId is required' } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'admin' + ) assertWorkflowMutationNotAborted(context) - await deleteWorkflowRecord(workflowId) + + const result = await performDeleteWorkflow({ workflowId, userId: context.userId }) + if (!result.success) { + return { success: false, error: result.error || 'Failed to delete workflow' } + } return { success: true, @@ -616,7 +698,7 @@ export async function executeDeleteFolder( } const workspaceId = context.workspaceId || (await getDefaultWorkspaceId(context.userId)) - await ensureWorkspaceAccess(workspaceId, context.userId, true) + await ensureWorkspaceAccess(workspaceId, context.userId, 'admin') const folders = await listFolders(workspaceId) const folder = folders.find((f) => f.folderId === folderId) @@ -625,12 +707,19 @@ export async function executeDeleteFolder( } assertWorkflowMutationNotAborted(context) - const deleted = await deleteFolderRecord(folderId) - if (!deleted) { - return { success: false, error: 'Folder not found' } + + const result = await performDeleteFolder({ + folderId, + workspaceId, + userId: context.userId, + folderName: folder.folderName, + }) + + if (!result.success) { + return { success: false, error: result.error || 'Failed to delete folder' } } - return { success: true, output: { folderId, deleted: true } } + return { success: true, output: { folderId, deleted: true, ...result.deletedItems } } } catch (error) { return { success: false, error: error instanceof Error ? 
error.message : String(error) } } @@ -653,6 +742,8 @@ export async function executeRenameFolder( return { success: false, error: 'Folder name must be 200 characters or less' } } + const workspaceId = context.workspaceId || (await getDefaultWorkspaceId(context.userId)) + await ensureWorkspaceAccess(workspaceId, context.userId, 'write') assertWorkflowMutationNotAborted(context) await updateFolderRecord(folderId, { name }) @@ -688,7 +779,11 @@ export async function executeRunBlock( } } - const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) + const { workflow: workflowRecord } = await ensureWorkflowAccess( + workflowId, + context.userId, + 'write' + ) const useDraftState = !params.useDeployedState const result = await executeWorkflow( diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts index d2ed986c634..2148a2c0cb1 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts @@ -46,7 +46,7 @@ export async function executeListFolders( context.workspaceId || (await getDefaultWorkspaceId(context.userId)) - await ensureWorkspaceAccess(workspaceId, context.userId, false) + await ensureWorkspaceAccess(workspaceId, context.userId, 'read') const folders = await listFolders(workspaceId) diff --git a/apps/sim/lib/workflows/orchestration/chat-deploy.ts b/apps/sim/lib/workflows/orchestration/chat-deploy.ts new file mode 100644 index 00000000000..8eda5054a86 --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/chat-deploy.ts @@ -0,0 +1,206 @@ +import crypto from 'crypto' +import { db } from '@sim/db' +import { chat } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { 
encryptSecret } from '@/lib/core/security/encryption' +import { getBaseUrl } from '@/lib/core/utils/urls' +import { performFullDeploy } from '@/lib/workflows/orchestration/deploy' + +const logger = createLogger('ChatDeployOrchestration') + +export interface ChatDeployPayload { + workflowId: string + userId: string + identifier: string + title: string + description?: string + customizations?: { primaryColor?: string; welcomeMessage?: string; imageUrl?: string } + authType?: 'public' | 'password' | 'email' | 'sso' + password?: string | null + allowedEmails?: string[] + outputConfigs?: Array<{ blockId: string; path: string }> + workspaceId?: string | null +} + +export interface PerformChatDeployResult { + success: boolean + chatId?: string + chatUrl?: string + error?: string +} + +/** + * Deploys a chat: deploys the underlying workflow via `performFullDeploy`, + * encrypts passwords, creates or updates the chat record, fires telemetry, + * and records an audit entry. Both the chat API route and the copilot + * `deploy_chat` tool must use this function. + */ +export async function performChatDeploy( + params: ChatDeployPayload +): Promise { + const { + workflowId, + userId, + identifier, + title, + description = '', + authType = 'public', + password, + allowedEmails = [], + outputConfigs = [], + } = params + + const customizations = { + primaryColor: params.customizations?.primaryColor || 'var(--brand-hover)', + welcomeMessage: params.customizations?.welcomeMessage || 'Hi there! How can I help you today?', + ...(params.customizations?.imageUrl ? 
{ imageUrl: params.customizations.imageUrl } : {}), + } + + const deployResult = await performFullDeploy({ workflowId, userId }) + if (!deployResult.success) { + return { success: false, error: deployResult.error || 'Failed to deploy workflow' } + } + + let encryptedPassword: string | null = null + if (authType === 'password' && password) { + const { encrypted } = await encryptSecret(password) + encryptedPassword = encrypted + } + + const [existingDeployment] = await db + .select() + .from(chat) + .where(and(eq(chat.workflowId, workflowId), isNull(chat.archivedAt))) + .limit(1) + + let chatId: string + if (existingDeployment) { + chatId = existingDeployment.id + + let passwordToStore: string | null + if (authType === 'password') { + passwordToStore = encryptedPassword || existingDeployment.password + } else { + passwordToStore = null + } + + await db + .update(chat) + .set({ + identifier, + title, + description: description || null, + customizations, + authType, + password: passwordToStore, + allowedEmails: authType === 'email' || authType === 'sso' ? allowedEmails : [], + outputConfigs, + updatedAt: new Date(), + }) + .where(eq(chat.id, chatId)) + } else { + chatId = crypto.randomUUID() + await db.insert(chat).values({ + id: chatId, + workflowId, + userId, + identifier, + title, + description: description || null, + customizations, + isActive: true, + authType, + password: encryptedPassword, + allowedEmails: authType === 'email' || authType === 'sso' ? 
allowedEmails : [], + outputConfigs, + createdAt: new Date(), + updatedAt: new Date(), + }) + } + + const baseUrl = getBaseUrl() + let chatUrl: string + try { + const url = new URL(baseUrl) + let host = url.host + if (host.startsWith('www.')) { + host = host.substring(4) + } + chatUrl = `${url.protocol}//${host}/chat/${identifier}` + } catch { + chatUrl = `${baseUrl}/chat/${identifier}` + } + + logger.info(`Chat "${title}" deployed successfully at ${chatUrl}`) + + try { + const { PlatformEvents } = await import('@/lib/core/telemetry') + PlatformEvents.chatDeployed({ + chatId, + workflowId, + authType, + hasOutputConfigs: outputConfigs.length > 0, + }) + } catch (_e) { + // Telemetry is best-effort + } + + recordAudit({ + workspaceId: params.workspaceId || null, + actorId: userId, + action: AuditAction.CHAT_DEPLOYED, + resourceType: AuditResourceType.CHAT, + resourceId: chatId, + resourceName: title, + description: `Deployed chat "${title}"`, + metadata: { workflowId, identifier, authType }, + }) + + return { success: true, chatId, chatUrl } +} + +export interface PerformChatUndeployParams { + chatId: string + userId: string + workspaceId?: string | null +} + +export interface PerformChatUndeployResult { + success: boolean + error?: string +} + +/** + * Undeploys a chat: deletes the chat record and records an audit entry. + * Both the chat manage DELETE route and the copilot `deploy_chat` undeploy + * action must use this function. 
+ */ +export async function performChatUndeploy( + params: PerformChatUndeployParams +): Promise { + const { chatId, userId, workspaceId } = params + + const [chatRecord] = await db.select().from(chat).where(eq(chat.id, chatId)).limit(1) + + if (!chatRecord) { + return { success: false, error: 'Chat not found' } + } + + await db.delete(chat).where(eq(chat.id, chatId)) + + logger.info(`Chat "${chatId}" deleted successfully`) + + recordAudit({ + workspaceId: workspaceId || null, + actorId: userId, + action: AuditAction.CHAT_DELETED, + resourceType: AuditResourceType.CHAT, + resourceId: chatId, + resourceName: chatRecord.title || chatId, + description: `Deleted chat deployment "${chatRecord.title || chatId}"`, + }) + + return { success: true } +} diff --git a/apps/sim/lib/workflows/orchestration/deploy.ts b/apps/sim/lib/workflows/orchestration/deploy.ts new file mode 100644 index 00000000000..5e8863ccb0b --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/deploy.ts @@ -0,0 +1,484 @@ +import { db, workflowDeploymentVersion, workflow as workflowTable } from '@sim/db' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { NextRequest } from 'next/server' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { getBaseUrl } from '@/lib/core/utils/urls' +import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' +import { + cleanupWebhooksForWorkflow, + restorePreviousVersionWebhooks, + saveTriggerWebhooksForDeploy, +} from '@/lib/webhooks/deploy' +import type { OrchestrationErrorCode } from '@/lib/workflows/orchestration/types' +import { + activateWorkflowVersion, + activateWorkflowVersionById, + deployWorkflow, + loadWorkflowFromNormalizedTables, + undeployWorkflow, +} from '@/lib/workflows/persistence/utils' +import { + cleanupDeploymentVersion, + createSchedulesForDeploy, + 
validateWorkflowSchedules, +} from '@/lib/workflows/schedules' + +const logger = createLogger('DeployOrchestration') + +export interface PerformFullDeployParams { + workflowId: string + userId: string + workflowName?: string + requestId?: string + /** + * Optional NextRequest for external webhook subscriptions. + * If not provided, a synthetic request is constructed from the base URL. + */ + request?: NextRequest + /** + * Override the actor ID used in audit logs and the `deployedBy` field. + * Defaults to `userId`. Use `'admin-api'` for admin-initiated actions. + */ + actorId?: string +} + +export interface PerformFullDeployResult { + success: boolean + deployedAt?: Date + version?: number + deploymentVersionId?: string + error?: string + errorCode?: OrchestrationErrorCode + warnings?: string[] +} + +/** + * Performs a full workflow deployment: creates a deployment version, syncs + * trigger webhooks, creates schedules, cleans up the previous version, and + * syncs MCP tools. Both the deploy API route and the copilot deploy tools + * must use this single function so behaviour stays consistent. + */ +export async function performFullDeploy( + params: PerformFullDeployParams +): Promise { + const { workflowId, userId, workflowName } = params + const actorId = params.actorId ?? userId + const requestId = params.requestId ?? generateRequestId() + const request = params.request ?? 
new NextRequest(new URL('/api/webhooks', getBaseUrl())) + + const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) + if (!normalizedData) { + return { success: false, error: 'Failed to load workflow state', errorCode: 'not_found' } + } + + const scheduleValidation = validateWorkflowSchedules(normalizedData.blocks) + if (!scheduleValidation.isValid) { + return { + success: false, + error: `Invalid schedule configuration: ${scheduleValidation.error}`, + errorCode: 'validation', + } + } + + const [workflowRecord] = await db + .select() + .from(workflowTable) + .where(eq(workflowTable.id, workflowId)) + .limit(1) + + if (!workflowRecord) { + return { success: false, error: 'Workflow not found', errorCode: 'not_found' } + } + + const workflowData = workflowRecord as Record + + const [currentActiveVersion] = await db + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + const previousVersionId = currentActiveVersion?.id + + const rollbackDeployment = async () => { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow: workflowData, + userId, + previousVersionId, + requestId, + }) + const reactivateResult = await activateWorkflowVersionById({ + workflowId, + deploymentVersionId: previousVersionId, + }) + if (reactivateResult.success) return + } + await undeployWorkflow({ workflowId }) + } + + const deployResult = await deployWorkflow({ + workflowId, + deployedBy: actorId, + workflowName: workflowName || workflowRecord.name || undefined, + }) + + if (!deployResult.success) { + return { success: false, error: deployResult.error || 'Failed to deploy workflow' } + } + + const deployedAt = deployResult.deployedAt! 
+ const deploymentVersionId = deployResult.deploymentVersionId + + if (!deploymentVersionId) { + await undeployWorkflow({ workflowId }) + return { success: false, error: 'Failed to resolve deployment version' } + } + + const triggerSaveResult = await saveTriggerWebhooksForDeploy({ + request, + workflowId, + workflow: workflowData, + userId, + blocks: normalizedData.blocks, + requestId, + deploymentVersionId, + previousVersionId, + }) + + if (!triggerSaveResult.success) { + await cleanupDeploymentVersion({ + workflowId, + workflow: workflowData, + requestId, + deploymentVersionId, + }) + await rollbackDeployment() + return { + success: false, + error: triggerSaveResult.error?.message || 'Failed to save trigger configuration', + } + } + + const scheduleResult = await createSchedulesForDeploy( + workflowId, + normalizedData.blocks, + db, + deploymentVersionId + ) + if (!scheduleResult.success) { + logger.error(`[${requestId}] Failed to create schedule: ${scheduleResult.error}`) + await cleanupDeploymentVersion({ + workflowId, + workflow: workflowData, + requestId, + deploymentVersionId, + }) + await rollbackDeployment() + return { success: false, error: scheduleResult.error || 'Failed to create schedule' } + } + + if (previousVersionId && previousVersionId !== deploymentVersionId) { + try { + await cleanupDeploymentVersion({ + workflowId, + workflow: workflowData, + requestId, + deploymentVersionId: previousVersionId, + skipExternalCleanup: true, + }) + } catch (cleanupError) { + logger.error(`[${requestId}] Failed to clean up previous version`, cleanupError) + } + } + + await syncMcpToolsForWorkflow({ workflowId, requestId, context: 'deploy' }) + + recordAudit({ + workspaceId: (workflowData.workspaceId as string) || null, + actorId: actorId, + action: AuditAction.WORKFLOW_DEPLOYED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + resourceName: (workflowData.name as string) || undefined, + description: `Deployed workflow "${(workflowData.name as 
string) || workflowId}"`, + metadata: { version: deploymentVersionId }, + request, + }) + + return { + success: true, + deployedAt, + version: deployResult.version, + deploymentVersionId, + warnings: triggerSaveResult.warnings, + } +} + +export interface PerformFullUndeployParams { + workflowId: string + userId: string + requestId?: string + /** Override the actor ID used in audit logs. Defaults to `userId`. */ + actorId?: string +} + +export interface PerformFullUndeployResult { + success: boolean + error?: string +} + +/** + * Performs a full workflow undeploy: marks the workflow as undeployed, cleans up + * webhook records and external subscriptions, removes MCP tools, emits a + * telemetry event, and records an audit log entry. Both the deploy API DELETE + * handler and the copilot undeploy tools must use this single function. + */ +export async function performFullUndeploy( + params: PerformFullUndeployParams +): Promise { + const { workflowId, userId } = params + const actorId = params.actorId ?? userId + const requestId = params.requestId ?? 
generateRequestId() + + const [workflowRecord] = await db + .select() + .from(workflowTable) + .where(eq(workflowTable.id, workflowId)) + .limit(1) + + if (!workflowRecord) { + return { success: false, error: 'Workflow not found' } + } + + const workflowData = workflowRecord as Record + + const result = await undeployWorkflow({ workflowId }) + if (!result.success) { + return { success: false, error: result.error || 'Failed to undeploy workflow' } + } + + await cleanupWebhooksForWorkflow(workflowId, workflowData, requestId) + await removeMcpToolsForWorkflow(workflowId, requestId) + + logger.info(`[${requestId}] Workflow undeployed successfully: ${workflowId}`) + + try { + const { PlatformEvents } = await import('@/lib/core/telemetry') + PlatformEvents.workflowUndeployed({ workflowId }) + } catch (_e) { + // Telemetry is best-effort + } + + recordAudit({ + workspaceId: (workflowData.workspaceId as string) || null, + actorId: actorId, + action: AuditAction.WORKFLOW_UNDEPLOYED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + resourceName: (workflowData.name as string) || undefined, + description: `Undeployed workflow "${(workflowData.name as string) || workflowId}"`, + }) + + return { success: true } +} + +export interface PerformActivateVersionParams { + workflowId: string + version: number + userId: string + workflow: Record + requestId?: string + request?: NextRequest + /** Override the actor ID used in audit logs. Defaults to `userId`. */ + actorId?: string +} + +export interface PerformActivateVersionResult { + success: boolean + deployedAt?: Date + error?: string + errorCode?: OrchestrationErrorCode + warnings?: string[] +} + +/** + * Activates an existing deployment version: validates schedules, syncs trigger + * webhooks (with forced subscription recreation), creates schedules, activates + * the version, cleans up the previous version, syncs MCP tools, and records + * an audit entry. 
Both the deployment version PATCH handler and the admin + * activate route must use this function. + */ +export async function performActivateVersion( + params: PerformActivateVersionParams +): Promise { + const { workflowId, version, userId, workflow } = params + const actorId = params.actorId ?? userId + const requestId = params.requestId ?? generateRequestId() + const request = params.request ?? new NextRequest(new URL('/api/webhooks', getBaseUrl())) + + const [versionRow] = await db + .select({ + id: workflowDeploymentVersion.id, + state: workflowDeploymentVersion.state, + }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.version, version) + ) + ) + .limit(1) + + if (!versionRow?.state) { + return { success: false, error: 'Deployment version not found', errorCode: 'not_found' } + } + + const deployedState = versionRow.state as { blocks?: Record } + const blocks = deployedState.blocks + if (!blocks || typeof blocks !== 'object') { + return { success: false, error: 'Invalid deployed state structure', errorCode: 'validation' } + } + + const [currentActiveVersion] = await db + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + const previousVersionId = currentActiveVersion?.id + + const scheduleValidation = validateWorkflowSchedules( + blocks as Record + ) + if (!scheduleValidation.isValid) { + return { + success: false, + error: `Invalid schedule configuration: ${scheduleValidation.error}`, + errorCode: 'validation', + } + } + + const triggerSaveResult = await saveTriggerWebhooksForDeploy({ + request, + workflowId, + workflow, + userId, + blocks: blocks as Record, + requestId, + deploymentVersionId: versionRow.id, + previousVersionId, + forceRecreateSubscriptions: true, + }) + + if 
(!triggerSaveResult.success) { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow, + userId, + previousVersionId, + requestId, + }) + } + return { + success: false, + error: triggerSaveResult.error?.message || 'Failed to sync trigger configuration', + } + } + + const scheduleResult = await createSchedulesForDeploy( + workflowId, + blocks as Record, + db, + versionRow.id + ) + + if (!scheduleResult.success) { + await cleanupDeploymentVersion({ + workflowId, + workflow, + requestId, + deploymentVersionId: versionRow.id, + }) + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow, + userId, + previousVersionId, + requestId, + }) + } + return { success: false, error: scheduleResult.error || 'Failed to sync schedules' } + } + + const result = await activateWorkflowVersion({ workflowId, version }) + if (!result.success) { + await cleanupDeploymentVersion({ + workflowId, + workflow, + requestId, + deploymentVersionId: versionRow.id, + }) + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow, + userId, + previousVersionId, + requestId, + }) + } + return { success: false, error: result.error || 'Failed to activate version' } + } + + if (previousVersionId && previousVersionId !== versionRow.id) { + try { + await cleanupDeploymentVersion({ + workflowId, + workflow, + requestId, + deploymentVersionId: previousVersionId, + skipExternalCleanup: true, + }) + } catch (cleanupError) { + logger.error(`[${requestId}] Failed to clean up previous version`, cleanupError) + } + } + + await syncMcpToolsForWorkflow({ + workflowId, + requestId, + state: versionRow.state as { blocks?: Record }, + context: 'activate', + }) + + recordAudit({ + workspaceId: (workflow.workspaceId as string) || null, + actorId: actorId, + action: AuditAction.WORKFLOW_DEPLOYMENT_ACTIVATED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + description: `Activated deployment version 
${version}`, + metadata: { version }, + }) + + return { + success: true, + deployedAt: result.deployedAt, + warnings: triggerSaveResult.warnings, + } +} diff --git a/apps/sim/lib/workflows/orchestration/folder-lifecycle.ts b/apps/sim/lib/workflows/orchestration/folder-lifecycle.ts new file mode 100644 index 00000000000..05bc2bc4173 --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/folder-lifecycle.ts @@ -0,0 +1,155 @@ +import { db } from '@sim/db' +import { workflow, workflowFolder } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { archiveWorkflowsByIdsInWorkspace } from '@/lib/workflows/lifecycle' +import type { OrchestrationErrorCode } from '@/lib/workflows/orchestration/types' + +const logger = createLogger('FolderLifecycle') + +/** + * Recursively deletes a folder: removes child folders first, archives non-archived + * workflows in each folder via {@link archiveWorkflowsByIdsInWorkspace}, then deletes + * the folder row. 
+ */ +export async function deleteFolderRecursively( + folderId: string, + workspaceId: string +): Promise<{ folders: number; workflows: number }> { + const stats = { folders: 0, workflows: 0 } + + const childFolders = await db + .select({ id: workflowFolder.id }) + .from(workflowFolder) + .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId))) + + for (const childFolder of childFolders) { + const childStats = await deleteFolderRecursively(childFolder.id, workspaceId) + stats.folders += childStats.folders + stats.workflows += childStats.workflows + } + + const workflowsInFolder = await db + .select({ id: workflow.id }) + .from(workflow) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) + + if (workflowsInFolder.length > 0) { + await archiveWorkflowsByIdsInWorkspace( + workspaceId, + workflowsInFolder.map((entry) => entry.id), + { requestId: `folder-${folderId}` } + ) + stats.workflows += workflowsInFolder.length + } + + await db.delete(workflowFolder).where(eq(workflowFolder.id, folderId)) + stats.folders += 1 + + return stats +} + +/** + * Counts non-archived workflows in the folder and all descendant folders. 
+ */ +export async function countWorkflowsInFolderRecursively( + folderId: string, + workspaceId: string +): Promise<number> { + let count = 0 + + const workflowsInFolder = await db + .select({ id: workflow.id }) + .from(workflow) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) + + count += workflowsInFolder.length + + const childFolders = await db + .select({ id: workflowFolder.id }) + .from(workflowFolder) + .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId))) + + for (const childFolder of childFolders) { + count += await countWorkflowsInFolderRecursively(childFolder.id, workspaceId) + } + + return count +} + +/** Parameters for {@link performDeleteFolder}. */ +export interface PerformDeleteFolderParams { + folderId: string + workspaceId: string + userId: string + folderName?: string +} + +/** Outcome of {@link performDeleteFolder}. */ +export interface PerformDeleteFolderResult { + success: boolean + error?: string + errorCode?: OrchestrationErrorCode + deletedItems?: { folders: number; workflows: number } +} + +/** + * Performs a full folder deletion: enforces the last-workflow guard, + * recursively archives child workflows and sub-folders, and records + * an audit entry. Both the folders API DELETE handler and the copilot + * delete_folder tool must use this function. 
+ */ +export async function performDeleteFolder( + params: PerformDeleteFolderParams +): Promise<PerformDeleteFolderResult> { + const { folderId, workspaceId, userId, folderName } = params + + const workflowsInFolder = await countWorkflowsInFolderRecursively(folderId, workspaceId) + const totalWorkflowsInWorkspace = await db + .select({ id: workflow.id }) + .from(workflow) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))) + + if (workflowsInFolder > 0 && workflowsInFolder >= totalWorkflowsInWorkspace.length) { + return { + success: false, + error: 'Cannot delete folder containing the only workflow(s) in the workspace', + errorCode: 'validation', + } + } + + const deletionStats = await deleteFolderRecursively(folderId, workspaceId) + + logger.info('Deleted folder and all contents:', { folderId, deletionStats }) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.FOLDER_DELETED, + resourceType: AuditResourceType.FOLDER, + resourceId: folderId, + resourceName: folderName, + description: `Deleted folder "${folderName || folderId}"`, + metadata: { + affected: { + workflows: deletionStats.workflows, + subfolders: deletionStats.folders - 1, + }, + }, + }) + + return { success: true, deletedItems: deletionStats } +} diff --git a/apps/sim/lib/workflows/orchestration/index.ts b/apps/sim/lib/workflows/orchestration/index.ts new file mode 100644 index 00000000000..76dbc07dc27 --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/index.ts @@ -0,0 +1,30 @@ +export { + type ChatDeployPayload, + type PerformChatDeployResult, + type PerformChatUndeployParams, + type PerformChatUndeployResult, + performChatDeploy, + performChatUndeploy, +} from './chat-deploy' +export { + type PerformActivateVersionParams, + type PerformActivateVersionResult, + type PerformFullDeployParams, + type PerformFullDeployResult, + type PerformFullUndeployParams, + type PerformFullUndeployResult, + performActivateVersion, + performFullDeploy, + performFullUndeploy, +} from 
'./deploy' +export { + type PerformDeleteFolderParams, + type PerformDeleteFolderResult, + performDeleteFolder, +} from './folder-lifecycle' +export type { OrchestrationErrorCode } from './types' +export { + type PerformDeleteWorkflowParams, + type PerformDeleteWorkflowResult, + performDeleteWorkflow, +} from './workflow-lifecycle' diff --git a/apps/sim/lib/workflows/orchestration/types.ts b/apps/sim/lib/workflows/orchestration/types.ts new file mode 100644 index 00000000000..41939c80f74 --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/types.ts @@ -0,0 +1 @@ +export type OrchestrationErrorCode = 'validation' | 'not_found' | 'internal' diff --git a/apps/sim/lib/workflows/orchestration/workflow-lifecycle.ts b/apps/sim/lib/workflows/orchestration/workflow-lifecycle.ts new file mode 100644 index 00000000000..3e757e6b2bf --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/workflow-lifecycle.ts @@ -0,0 +1,118 @@ +import { db } from '@sim/db' +import { templates, workflow } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { archiveWorkflow } from '@/lib/workflows/lifecycle' +import type { OrchestrationErrorCode } from '@/lib/workflows/orchestration/types' + +const logger = createLogger('WorkflowLifecycle') + +export interface PerformDeleteWorkflowParams { + workflowId: string + userId: string + requestId?: string + /** When 'delete', delete published templates. When 'orphan' (default), set their workflowId to null. */ + templateAction?: 'delete' | 'orphan' + /** When true, allows deleting the last workflow in a workspace (used by admin API). */ + skipLastWorkflowGuard?: boolean + /** Override the actor ID used in audit logs. Defaults to `userId`. 
*/ + actorId?: string +} + +export interface PerformDeleteWorkflowResult { + success: boolean + error?: string + errorCode?: OrchestrationErrorCode +} + +/** + * Performs a full workflow deletion: enforces the last-workflow guard, + * handles published templates, archives the workflow via `archiveWorkflow`, + * and records an audit entry. Both the workflow API DELETE handler and the + * copilot delete_workflow tool must use this function. + */ +export async function performDeleteWorkflow( + params: PerformDeleteWorkflowParams +): Promise<PerformDeleteWorkflowResult> { + const { workflowId, userId, templateAction = 'orphan', skipLastWorkflowGuard = false } = params + const actorId = params.actorId ?? userId + const requestId = params.requestId ?? generateRequestId() + + const [workflowRecord] = await db + .select() + .from(workflow) + .where(eq(workflow.id, workflowId)) + .limit(1) + + if (!workflowRecord) { + return { success: false, error: 'Workflow not found', errorCode: 'not_found' } + } + + if (!skipLastWorkflowGuard && workflowRecord.workspaceId) { + const totalWorkflows = await db + .select({ id: workflow.id }) + .from(workflow) + .where(and(eq(workflow.workspaceId, workflowRecord.workspaceId), isNull(workflow.archivedAt))) + + if (totalWorkflows.length <= 1) { + return { + success: false, + error: 'Cannot delete the only workflow in the workspace', + errorCode: 'validation', + } + } + } + + try { + const publishedTemplates = await db + .select({ id: templates.id }) + .from(templates) + .where(eq(templates.workflowId, workflowId)) + + if (publishedTemplates.length > 0) { + if (templateAction === 'delete') { + await db.delete(templates).where(eq(templates.workflowId, workflowId)) + logger.info( + `[${requestId}] Deleted ${publishedTemplates.length} templates for workflow ${workflowId}` + ) + } else { + await db + .update(templates) + .set({ workflowId: null }) + .where(eq(templates.workflowId, workflowId)) + logger.info( + `[${requestId}] Orphaned ${publishedTemplates.length} templates 
for workflow ${workflowId}` + ) + } + } + } catch (templateError) { + logger.warn(`[${requestId}] Failed to handle templates for workflow ${workflowId}`, { + error: templateError, + }) + } + + const archiveResult = await archiveWorkflow(workflowId, { requestId }) + if (!archiveResult.workflow) { + return { success: false, error: 'Workflow not found', errorCode: 'not_found' } + } + + logger.info(`[${requestId}] Successfully archived workflow ${workflowId}`) + + recordAudit({ + workspaceId: workflowRecord.workspaceId || null, + actorId: actorId, + action: AuditAction.WORKFLOW_DELETED, + resourceType: AuditResourceType.WORKFLOW, + resourceId: workflowId, + resourceName: workflowRecord.name, + description: `Archived workflow "${workflowRecord.name}"`, + metadata: { + archived: archiveResult.archived, + templateAction, + }, + }) + + return { success: true } +} diff --git a/apps/sim/lib/workflows/utils.ts b/apps/sim/lib/workflows/utils.ts index bf5cd48e30b..d7838b0ee41 100644 --- a/apps/sim/lib/workflows/utils.ts +++ b/apps/sim/lib/workflows/utils.ts @@ -616,6 +616,36 @@ export async function deleteFolderRecord(folderId: string): Promise { return true } +/** + * Checks whether setting `parentId` as the parent of `folderId` would + * create a circular reference in the folder tree. 
+ */ +export async function checkForCircularReference( + folderId: string, + parentId: string +): Promise<boolean> { + let currentParentId: string | null = parentId + const visited = new Set<string>() + + while (currentParentId) { + if (visited.has(currentParentId) || currentParentId === folderId) { + return true + } + + visited.add(currentParentId) + + const [parent] = await db + .select({ parentId: workflowFolder.parentId }) + .from(workflowFolder) + .where(eq(workflowFolder.id, currentParentId)) + .limit(1) + + currentParentId = parent?.parentId || null + } + + return false +} + export async function listFolders(workspaceId: string) { return db .select({ diff --git a/packages/testing/src/mocks/audit.mock.ts b/packages/testing/src/mocks/audit.mock.ts index 69b76442895..d44b82df206 100644 --- a/packages/testing/src/mocks/audit.mock.ts +++ b/packages/testing/src/mocks/audit.mock.ts @@ -22,6 +22,8 @@ export const auditMock = { CHAT_DEPLOYED: 'chat.deployed', CHAT_UPDATED: 'chat.updated', CHAT_DELETED: 'chat.deleted', + CREDENTIAL_DELETED: 'credential.deleted', + CREDENTIAL_RENAMED: 'credential.renamed', CREDIT_PURCHASED: 'credit.purchased', CREDENTIAL_SET_CREATED: 'credential_set.created', CREDENTIAL_SET_UPDATED: 'credential_set.updated', @@ -32,6 +34,9 @@ export const auditMock = { CREDENTIAL_SET_INVITATION_ACCEPTED: 'credential_set_invitation.accepted', CREDENTIAL_SET_INVITATION_RESENT: 'credential_set_invitation.resent', CREDENTIAL_SET_INVITATION_REVOKED: 'credential_set_invitation.revoked', + CUSTOM_TOOL_CREATED: 'custom_tool.created', + CUSTOM_TOOL_UPDATED: 'custom_tool.updated', + CUSTOM_TOOL_DELETED: 'custom_tool.deleted', CONNECTOR_DOCUMENT_RESTORED: 'connector_document.restored', CONNECTOR_DOCUMENT_EXCLUDED: 'connector_document.excluded', DOCUMENT_UPLOADED: 'document.uploaded', @@ -85,6 +90,9 @@ export const auditMock = { PERMISSION_GROUP_MEMBER_ADDED: 'permission_group_member.added', PERMISSION_GROUP_MEMBER_REMOVED: 'permission_group_member.removed', SCHEDULE_UPDATED: 
'schedule.updated', + SKILL_CREATED: 'skill.created', + SKILL_UPDATED: 'skill.updated', + SKILL_DELETED: 'skill.deleted', TABLE_CREATED: 'table.created', TABLE_UPDATED: 'table.updated', TABLE_DELETED: 'table.deleted', @@ -116,6 +124,7 @@ export const auditMock = { CHAT: 'chat', CONNECTOR: 'connector', CREDENTIAL_SET: 'credential_set', + CUSTOM_TOOL: 'custom_tool', DOCUMENT: 'document', ENVIRONMENT: 'environment', FILE: 'file', @@ -129,6 +138,7 @@ export const auditMock = { PASSWORD: 'password', PERMISSION_GROUP: 'permission_group', SCHEDULE: 'schedule', + SKILL: 'skill', TABLE: 'table', TEMPLATE: 'template', WEBHOOK: 'webhook',