Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
146 changes: 143 additions & 3 deletions apps/sim/app/api/workflows/route.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,22 @@ const {
mockWorkflowCreated,
mockDbSelect,
mockDbInsert,
mockWorkspaceExists,
mockVerifyWorkspaceMembership,
} = vi.hoisted(() => ({
mockCheckSessionOrInternalAuth: vi.fn(),
mockGetUserEntityPermissions: vi.fn(),
mockWorkflowCreated: vi.fn(),
mockDbSelect: vi.fn(),
mockDbInsert: vi.fn(),
mockWorkspaceExists: vi.fn(),
mockVerifyWorkspaceMembership: vi.fn(),
}))

vi.mock('drizzle-orm', () => ({
...drizzleOrmMock,
min: vi.fn((field) => ({ type: 'min', field })),
count: vi.fn(() => ({ type: 'count' })),
}))

vi.mock('@sim/db', () => ({
Expand Down Expand Up @@ -71,11 +76,11 @@ vi.mock('@/lib/auth/hybrid', () => ({

vi.mock('@/lib/workspaces/permissions/utils', () => ({
getUserEntityPermissions: (...args: unknown[]) => mockGetUserEntityPermissions(...args),
workspaceExists: vi.fn(),
workspaceExists: (...args: unknown[]) => mockWorkspaceExists(...args),
}))

vi.mock('@/app/api/workflows/utils', () => ({
verifyWorkspaceMembership: vi.fn(),
verifyWorkspaceMembership: (...args: unknown[]) => mockVerifyWorkspaceMembership(...args),
}))

vi.mock('@/lib/core/telemetry', () => ({
Expand All @@ -84,7 +89,7 @@ vi.mock('@/lib/core/telemetry', () => ({
},
}))

import { POST } from '@/app/api/workflows/route'
import { GET, POST } from '@/app/api/workflows/route'

describe('Workflows API Route - POST ordering', () => {
beforeEach(() => {
Expand Down Expand Up @@ -171,3 +176,138 @@ describe('Workflows API Route - POST ordering', () => {
expect(insertedValues?.sortOrder).toBe(0)
})
})

describe('Workflows API Route - GET pagination', () => {
beforeEach(() => {
vi.clearAllMocks()

// Default happy path for every test in this suite: an authenticated user
// who is a member of an existing workspace, so GET proceeds past the auth
// and membership checks and reaches the pagination logic under test.
mockCheckSessionOrInternalAuth.mockResolvedValue({
success: true,
userId: 'user-123',
userName: 'Test User',
userEmail: 'test@example.com',
})
mockWorkspaceExists.mockResolvedValue(true)
mockVerifyWorkspaceMembership.mockResolvedValue('member')
})

/**
 * Builds a fluent mock chain for db.select() that terminates with the
 * given resolved values. The chain supports arbitrary method calls
 * (from, where, orderBy, limit, offset) in any order, and is thenable so
 * awaiting it yields the canned rows.
 */
function buildSelectChain(resolvedValues: unknown[]) {
  const target: Record<string, unknown> = {}
  const fluentChain = new Proxy(target, {
    get(_obj, prop) {
      // Awaiting the chain hits the thenable protocol; resolve with the rows.
      if (prop === 'then') {
        return (onFulfilled: (value: unknown) => void) => onFulfilled(resolvedValues)
      }
      // Every other property is a builder method: a fresh mock that keeps
      // the chain fluent by returning the proxy itself.
      return vi.fn().mockReturnValue(fluentChain)
    },
  })
  return fluentChain
}
Comment on lines +199 to +210
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The Proxy-based mock doesn't capture arguments passed to .limit() and .offset(). This means the tests confirm the response shape is correct but don't verify that the actual pagination parameters were forwarded to the database queries.

For example, if the route silently ignored the limit param and always used the default, the test would still pass (if the mock data happens to be the right size). Consider capturing these arguments to add stronger assertions:

Suggested change
function buildSelectChain(resolvedValues: unknown[]) {
const chain: Record<string, unknown> = {}
const self = new Proxy(chain, {
get(_target, prop) {
if (prop === 'then') {
return (resolve: (v: unknown) => void) => resolve(resolvedValues)
}
return vi.fn().mockReturnValue(self)
},
})
return self
}
function buildSelectChain(resolvedValues: unknown[]) {
const chain: Record<string, unknown> = {}
const limitSpy = vi.fn().mockReturnValue(chain)
const offsetSpy = vi.fn().mockReturnValue(chain)
const self = new Proxy(chain, {
get(_target, prop) {
if (prop === 'then') {
return (resolve: (v: unknown) => void) => resolve(resolvedValues)
}
if (prop === 'limit') return limitSpy
if (prop === 'offset') return offsetSpy
return vi.fn().mockReturnValue(self)
},
})
return Object.assign(self, { limitSpy, offsetSpy })
}
// Then in tests:
const chain = buildSelectChain([{ count: 5 }])
// ... after calling GET ...
expect(chain.limitSpy).toHaveBeenCalledWith(1)
expect(chain.offsetSpy).toHaveBeenCalledWith(2)


// Happy path: workspace-scoped listing with no explicit pagination params
// must return the rows plus a pagination envelope using the defaults.
it('returns pagination metadata with workspace workflows', async () => {
const mockWorkflows = [
{ id: 'wf-1', name: 'Workflow 1', workspaceId: 'ws-1' },
{ id: 'wf-2', name: 'Workflow 2', workspaceId: 'ws-1' },
]

// The route issues two queries per request: the first db.select() is the
// COUNT query, the second fetches the page of rows.
const selectCalls: unknown[][] = []
mockDbSelect.mockImplementation((...args: unknown[]) => {
selectCalls.push(args)
if (selectCalls.length === 1) {
return buildSelectChain([{ count: 2 }])
}
return buildSelectChain(mockWorkflows)
})

const req = createMockRequest(
'GET',
undefined,
{},
'http://localhost:3000/api/workflows?workspaceId=ws-1'
)

const response = await GET(req as any)
const json = await response.json()

expect(response.status).toBe(200)
expect(json.data).toHaveLength(2)
expect(json.pagination).toBeDefined()
// Defaults: limit 200 (DEFAULT_PAGE_LIMIT), offset 0. All 2 of 2 rows fit
// in one page, so hasMore is false.
expect(json.pagination.total).toBe(2)
expect(json.pagination.limit).toBe(200)
expect(json.pagination.offset).toBe(0)
expect(json.pagination.hasMore).toBe(false)
})

// Custom limit/offset query params must be echoed back in the pagination
// envelope, with hasMore derived from offset + returned rows vs total.
it('respects custom limit and offset params', async () => {
const mockWorkflows = [{ id: 'wf-1', name: 'Workflow 1', workspaceId: 'ws-1' }]

// First db.select() call answers the COUNT query (total 5), the second
// returns the single-row page.
const selectCalls: unknown[][] = []
mockDbSelect.mockImplementation((...args: unknown[]) => {
selectCalls.push(args)
if (selectCalls.length === 1) {
return buildSelectChain([{ count: 5 }])
}
return buildSelectChain(mockWorkflows)
})

const req = createMockRequest(
'GET',
undefined,
{},
'http://localhost:3000/api/workflows?workspaceId=ws-1&limit=1&offset=2'
)

const response = await GET(req as any)
const json = await response.json()

expect(response.status).toBe(200)
expect(json.pagination.limit).toBe(1)
expect(json.pagination.offset).toBe(2)
expect(json.pagination.total).toBe(5)
// offset (2) + rows returned (1) < total (5), so more pages remain.
expect(json.pagination.hasMore).toBe(true)
})

// A requested limit above the server-side cap (MAX_PAGE_LIMIT = 500) must be
// clamped, and the clamped value reported back in the pagination envelope.
it('clamps limit to MAX_PAGE_LIMIT', async () => {
const selectCalls: unknown[][] = []
mockDbSelect.mockImplementation((...args: unknown[]) => {
selectCalls.push(args)
if (selectCalls.length === 1) {
return buildSelectChain([{ count: 0 }])
}
return buildSelectChain([])
})

const req = createMockRequest(
'GET',
undefined,
{},
'http://localhost:3000/api/workflows?workspaceId=ws-1&limit=9999'
)

const response = await GET(req as any)
const json = await response.json()

expect(response.status).toBe(200)
expect(json.pagination.limit).toBe(500)
})

// Without a workspaceId the route looks up the user's workspace permissions;
// an empty permissions result short-circuits to an empty response that must
// still carry a well-formed pagination envelope.
it('returns pagination in empty workspace response for no-workspace query', async () => {
mockDbSelect.mockImplementation(() => buildSelectChain([]))

const req = createMockRequest('GET', undefined, {}, 'http://localhost:3000/api/workflows')

const response = await GET(req as any)
const json = await response.json()

expect(response.status).toBe(200)
expect(json.data).toEqual([])
expect(json.pagination).toBeDefined()
expect(json.pagination.total).toBe(0)
expect(json.pagination.hasMore).toBe(false)
})
})
73 changes: 60 additions & 13 deletions apps/sim/app/api/workflows/route.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm'
import { and, asc, count, eq, inArray, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
Expand All @@ -12,6 +12,9 @@ import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'

const logger = createLogger('WorkflowAPI')

const DEFAULT_PAGE_LIMIT = 200
const MAX_PAGE_LIMIT = 500

const CreateWorkflowSchema = z.object({
name: z.string().min(1, 'Name is required'),
description: z.string().optional().default(''),
Expand All @@ -28,6 +31,14 @@ export async function GET(request: NextRequest) {
const url = new URL(request.url)
const workspaceId = url.searchParams.get('workspaceId')

const rawLimit = url.searchParams.get('limit')
const rawOffset = url.searchParams.get('offset')
const limit = Math.min(
Math.max(1, rawLimit ? Number(rawLimit) : DEFAULT_PAGE_LIMIT),
MAX_PAGE_LIMIT
)
const offset = Math.max(0, rawOffset ? Number(rawOffset) : 0)
Comment on lines +36 to +40
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Non-numeric limit/offset query params will cause a 500 error instead of being rejected gracefully.

When a client passes ?limit=abc, Number('abc') returns NaN, which propagates through Math.max and Math.min unchanged (both return NaN when given NaN as input). Drizzle then receives NaN for the LIMIT clause, causing a database error.

Add validation to ensure both params are finite numbers before clamping:

Suggested change
const limit = Math.min(
Math.max(1, rawLimit ? Number(rawLimit) : DEFAULT_PAGE_LIMIT),
MAX_PAGE_LIMIT
)
const offset = Math.max(0, rawOffset ? Number(rawOffset) : 0)
const rawLimit = url.searchParams.get('limit')
const rawOffset = url.searchParams.get('offset')
const parsedLimit = rawLimit !== null ? Number(rawLimit) : DEFAULT_PAGE_LIMIT
const parsedOffset = rawOffset !== null ? Number(rawOffset) : 0
const limit = Number.isFinite(parsedLimit)
? Math.min(Math.max(1, parsedLimit), MAX_PAGE_LIMIT)
: DEFAULT_PAGE_LIMIT
const offset = Number.isFinite(parsedOffset) ? Math.max(0, parsedOffset) : 0

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

NaN propagates to SQL when limit/offset are non-numeric

Medium Severity

When limit or offset query params contain non-numeric strings (e.g. ?limit=abc), Number("abc") returns NaN, which propagates through Math.max and Math.min (both return NaN when any argument is NaN). The resulting NaN is passed to drizzle's .limit() and .offset(), which can produce a database error or bypass the limit protection entirely. The existing parsePaginationParams in apps/sim/app/api/v1/admin/types.ts already handles this correctly with explicit Number.isNaN checks and fallback to defaults.

Fix in Cursor Fix in Web

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Duplicates existing parsePaginationParams utility without NaN safety

Low Severity

The pagination parameter parsing logic here reimplements what parsePaginationParams in apps/sim/app/api/v1/admin/types.ts already does — extracting limit and offset from URL search params with clamping and defaults — but without the NaN safety that utility includes. Reusing or adapting the existing utility would avoid the divergence and the NaN bug.

Fix in Cursor Fix in Web


try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
Expand Down Expand Up @@ -63,32 +74,68 @@ export async function GET(request: NextRequest) {
}

let workflows
let total: number

const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]

if (workspaceId) {
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
.orderBy(...orderByClause)
const whereCondition = eq(workflow.workspaceId, workspaceId)

const [countResult, workflowRows] = await Promise.all([
db.select({ count: count() }).from(workflow).where(whereCondition),
db
.select()
.from(workflow)
.where(whereCondition)
.orderBy(...orderByClause)
.limit(limit)
.offset(offset),
])

total = countResult[0]?.count ?? 0
workflows = workflowRows
} else {
const workspacePermissionRows = await db
.select({ workspaceId: permissions.entityId })
.from(permissions)
.where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
const workspaceIds = workspacePermissionRows.map((row) => row.workspaceId)
if (workspaceIds.length === 0) {
return NextResponse.json({ data: [] }, { status: 200 })
return NextResponse.json(
{ data: [], pagination: { total: 0, limit, offset, hasMore: false } },
{ status: 200 }
)
}
workflows = await db
.select()
.from(workflow)
.where(inArray(workflow.workspaceId, workspaceIds))
.orderBy(...orderByClause)

const whereCondition = inArray(workflow.workspaceId, workspaceIds)

const [countResult, workflowRows] = await Promise.all([
db.select({ count: count() }).from(workflow).where(whereCondition),
db
.select()
.from(workflow)
.where(whereCondition)
.orderBy(...orderByClause)
.limit(limit)
.offset(offset),
])

total = countResult[0]?.count ?? 0
workflows = workflowRows
}

return NextResponse.json({ data: workflows }, { status: 200 })
return NextResponse.json(
{
data: workflows,
pagination: {
total,
limit,
offset,
hasMore: offset + workflows.length < total,
},
},
{ status: 200 }
)
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(`[${requestId}] Workflow fetch error after ${elapsed}ms`, error)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
sanitizePathSegment,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
import { fetchAllPages } from '@/hooks/queries/utils/paginated-fetch'

const logger = createLogger('useExportWorkspace')

Expand All @@ -32,11 +33,9 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
try {
logger.info('Exporting workspace', { workspaceId })

const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
if (!workflowsResponse.ok) {
throw new Error('Failed to fetch workflows')
}
const { data: workflows } = await workflowsResponse.json()
const workflows = await fetchAllPages<Record<string, any>>(
`/api/workflows?workspaceId=${workspaceId}`
)

const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!foldersResponse.ok) {
Expand Down
39 changes: 39 additions & 0 deletions apps/sim/hooks/queries/utils/paginated-fetch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
/** Safety cap on sequential page fetches so a server that always reports
 * `hasMore: true` cannot cause an unbounded loop. */
const MAX_PAGES = 100

/**
 * Fetches all pages from a paginated API endpoint.
 *
 * The endpoint is expected to return `{ data: T[], pagination: { hasMore: boolean } }`.
 * Pages are fetched sequentially until `hasMore` is `false`, an empty page is
 * returned, or MAX_PAGES is reached (in which case the items collected so far
 * are returned).
 *
 * @param baseUrl - Base URL including any existing query params (e.g. `/api/workflows?workspaceId=ws-1`)
 * @param pageSize - Number of items per page requested from the server (default 200)
 * @returns All items concatenated across pages
 * @throws Error when any page request responds with a non-OK status
 */
export async function fetchAllPages<T>(baseUrl: string, pageSize = 200): Promise<T[]> {
  const allItems: T[] = []
  let offset = 0
  let pages = 0
  const separator = baseUrl.includes('?') ? '&' : '?'

  while (pages < MAX_PAGES) {
    const response = await fetch(`${baseUrl}${separator}limit=${pageSize}&offset=${offset}`)

    if (!response.ok) {
      throw new Error(`Failed to fetch from ${baseUrl}: ${response.statusText}`)
    }

    const json = await response.json()
    const data: T[] = Array.isArray(json.data) ? json.data : []
    allItems.push(...data)

    if (!json.pagination?.hasMore || data.length === 0) {
      break
    }

    // Advance by the number of items actually received, not the requested
    // pageSize: the server may clamp the limit (e.g. to its MAX_PAGE_LIMIT)
    // and return a shorter page with hasMore=true; advancing by pageSize in
    // that case would silently skip records.
    offset += data.length
    pages++
  }

  return allItems
}
9 changes: 2 additions & 7 deletions apps/sim/hooks/queries/workflow-mcp-servers.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { fetchAllPages } from '@/hooks/queries/utils/paginated-fetch'

const logger = createLogger('WorkflowMcpServerQueries')

Expand Down Expand Up @@ -445,13 +446,7 @@ export function useDeleteWorkflowMcpTool() {
* Fetch deployed workflows for a workspace
*/
async function fetchDeployedWorkflows(workspaceId: string): Promise<DeployedWorkflow[]> {
const response = await fetch(`/api/workflows?workspaceId=${workspaceId}`)

if (!response.ok) {
throw new Error('Failed to fetch workflows')
}

const { data }: { data: any[] } = await response.json()
const data = await fetchAllPages<Record<string, any>>(`/api/workflows?workspaceId=${workspaceId}`)

return data
.filter((w) => w.isDeployed)
Expand Down
Loading