diff --git a/apps/sim/app/api/workflows/route.ts b/apps/sim/app/api/workflows/route.ts index 5664c2bc22..e7b603c7c3 100644 --- a/apps/sim/app/api/workflows/route.ts +++ b/apps/sim/app/api/workflows/route.ts @@ -118,18 +118,18 @@ export async function POST(req: NextRequest) { logger.info(`[${requestId}] Creating workflow ${workflowId} for user ${session.user.id}`) - // Track workflow creation - try { - const { trackPlatformEvent } = await import('@/lib/telemetry/tracer') - trackPlatformEvent('platform.workflow.created', { - 'workflow.id': workflowId, - 'workflow.name': name, - 'workflow.has_workspace': !!workspaceId, - 'workflow.has_folder': !!folderId, + import('@/lib/telemetry/tracer') + .then(({ trackPlatformEvent }) => { + trackPlatformEvent('platform.workflow.created', { + 'workflow.id': workflowId, + 'workflow.name': name, + 'workflow.has_workspace': !!workspaceId, + 'workflow.has_folder': !!folderId, + }) + }) + .catch(() => { + // Silently fail }) - } catch (_e) { - // Silently fail - } await db.insert(workflow).values({ id: workflowId, diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx index 2b9cba09a6..a696842a3a 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx @@ -23,6 +23,7 @@ import '@/components/emcn/components/code/code.css' interface LogSidebarProps { log: WorkflowLog | null isOpen: boolean + isLoadingDetails?: boolean onClose: () => void onNavigateNext?: () => void onNavigatePrev?: () => void @@ -192,6 +193,7 @@ const BlockContentDisplay = ({ export function Sidebar({ log, isOpen, + isLoadingDetails = false, onClose, onNavigateNext, onNavigatePrev, @@ -219,15 +221,6 @@ export function Sidebar({ } }, [log?.id]) - const isLoadingDetails = useMemo(() => { - if (!log) return false - // Only show while we expect details to arrive (has executionId) - if (!log.executionId) return false - const hasEnhanced = !!log.executionData?.enhanced - const hasAnyDetails = hasEnhanced || !!log.cost || Array.isArray(log.executionData?.traceSpans) - return !hasAnyDetails - }, [log]) - const formattedContent = useMemo(() => { if (!log) return null diff --git a/apps/sim/app/workspace/[workspaceId]/logs/dashboard.tsx b/apps/sim/app/workspace/[workspaceId]/logs/dashboard.tsx index 7f8cfc7eac..7400eb4379 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/dashboard.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/dashboard.tsx @@ -3,7 +3,6 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { Loader2 } from 'lucide-react' import { useParams, useRouter, useSearchParams } from 'next/navigation' -import { createLogger } from '@/lib/logs/console/logger' import { soehne } from '@/app/fonts/soehne/soehne' import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls' import KPIs from '@/app/workspace/[workspaceId]/logs/components/dashboard/kpis' @@ -11,12 +10,15 @@ import WorkflowDetails from '@/app/workspace/[workspaceId]/logs/components/dashb import WorkflowsList from '@/app/workspace/[workspaceId]/logs/components/dashboard/workflows-list' import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline' import { mapToExecutionLog, mapToExecutionLogAlt } from '@/app/workspace/[workspaceId]/logs/utils' +import { + useExecutionsMetrics, + useGlobalDashboardLogs, + useWorkflowDashboardLogs, +} 
from '@/hooks/queries/logs'
 import { formatCost } from '@/providers/utils'
 import { useFilterStore } from '@/stores/logs/filters/store'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
-const logger = createLogger('Dashboard')
-
 type TimeFilter = '30m' | '1h' | '6h' | '12h' | '24h' | '3d' | '7d' | '14d' | '30d'
 
 interface WorkflowExecution {
@@ -59,15 +61,6 @@ interface ExecutionLog {
   workflowColor?: string
 }
 
-interface WorkflowDetailsDataLocal {
-  errorRates: { timestamp: string; value: number }[]
-  durations: { timestamp: string; value: number }[]
-  executionCounts: { timestamp: string; value: number }[]
-  logs: ExecutionLog[]
-  allLogs: ExecutionLog[]
-  __meta?: { offset: number; hasMore: boolean }
-}
-
 export default function Dashboard() {
   const params = useParams()
   const workspaceId = params.workspaceId as string
@@ -99,23 +92,7 @@ export default function Dashboard() {
     }
   }
   const [endTime, setEndTime] = useState(new Date())
-  const [executions, setExecutions] = useState<WorkflowExecution[]>([])
-  const [loading, setLoading] = useState(true)
-  const [isRefetching, setIsRefetching] = useState(false)
-  const [error, setError] = useState<string | null>(null)
   const [expandedWorkflowId, setExpandedWorkflowId] = useState<string | null>(null)
-  const [workflowDetails, setWorkflowDetails] = useState<Record<string, WorkflowDetailsDataLocal>>(
-    {}
-  )
-  const [globalDetails, setGlobalDetails] = useState<WorkflowDetailsDataLocal | null>(null)
-  const [globalLogsMeta, setGlobalLogsMeta] = useState<{ offset: number; hasMore: boolean }>({
-    offset: 0,
-    hasMore: true,
-  })
-  const [globalLoadingMore, setGlobalLoadingMore] = useState(false)
-  const [aggregateSegments, setAggregateSegments] = useState<
-    { timestamp: string; totalExecutions: number; successfulExecutions: number }[]
-  >([])
   const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
   const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
   const [searchQuery, setSearchQuery] = useState('')
@@ -135,6 +112,134 @@ export default function Dashboard() {
 
   const timeFilter = getTimeFilterFromRange(sidebarTimeRange)
 
+  const getStartTime = useCallback(() => {
+    const start = new Date(endTime)
+
+    switch (timeFilter) {
+      case '30m':
+        start.setMinutes(endTime.getMinutes() - 30)
+        break
+      case '1h':
+        start.setHours(endTime.getHours() - 1)
+        break
+      case '6h':
+        start.setHours(endTime.getHours() - 6)
+        break
+      case '12h':
+        start.setHours(endTime.getHours() - 12)
+        break
+      case '24h':
+        start.setHours(endTime.getHours() - 24)
+        break
+      case '3d':
+        start.setDate(endTime.getDate() - 3)
+        break
+      case '7d':
+        start.setDate(endTime.getDate() - 7)
+        break
+      case '14d':
+        start.setDate(endTime.getDate() - 14)
+        break
+      case '30d':
+        start.setDate(endTime.getDate() - 30)
+        break
+      default:
+        start.setHours(endTime.getHours() - 24)
+    }
+
+    return start
+  }, [endTime, timeFilter])
+
+  const metricsFilters = useMemo(
+    () => ({
+      workspaceId,
+      segments: segmentCount || DEFAULT_SEGMENTS,
+      startTime: getStartTime().toISOString(),
+      endTime: endTime.toISOString(),
+      workflowIds: workflowIds.length > 0 ? workflowIds : undefined,
+      folderIds: folderIds.length > 0 ? folderIds : undefined,
+      triggers: triggers.length > 0 ? triggers : undefined,
+    }),
+    [workspaceId, segmentCount, getStartTime, endTime, workflowIds, folderIds, triggers]
+  )
+
+  const logsFilters = useMemo(
+    () => ({
+      workspaceId,
+      startDate: getStartTime().toISOString(),
+      endDate: endTime.toISOString(),
+      workflowIds: workflowIds.length > 0 ? workflowIds : undefined,
+      folderIds: folderIds.length > 0 ? folderIds : undefined,
+      triggers: triggers.length > 0 ?
triggers : undefined, + limit: 50, + }), + [workspaceId, getStartTime, endTime, workflowIds, folderIds, triggers] + ) + + const metricsQuery = useExecutionsMetrics(metricsFilters, { + enabled: Boolean(workspaceId), + }) + + const globalLogsQuery = useGlobalDashboardLogs(logsFilters, { + enabled: Boolean(workspaceId), + }) + + const workflowLogsQuery = useWorkflowDashboardLogs(expandedWorkflowId ?? undefined, logsFilters, { + enabled: Boolean(workspaceId) && Boolean(expandedWorkflowId), + }) + + const executions = metricsQuery.data?.workflows ?? [] + const aggregateSegments = metricsQuery.data?.aggregateSegments ?? [] + const loading = metricsQuery.isLoading + const isRefetching = metricsQuery.isFetching && !metricsQuery.isLoading + const error = metricsQuery.error?.message ?? null + + const globalLogs = useMemo(() => { + if (!globalLogsQuery.data?.pages) return [] + return globalLogsQuery.data.pages.flatMap((page) => page.logs).map(mapToExecutionLog) + }, [globalLogsQuery.data?.pages]) + + const workflowLogs = useMemo(() => { + if (!workflowLogsQuery.data?.pages) return [] + return workflowLogsQuery.data.pages.flatMap((page) => page.logs).map(mapToExecutionLogAlt) + }, [workflowLogsQuery.data?.pages]) + + const globalDetails = useMemo(() => { + if (!aggregateSegments.length) return null + + const errorRates = aggregateSegments.map((s) => ({ + timestamp: s.timestamp, + value: s.totalExecutions > 0 ? (1 - s.successfulExecutions / s.totalExecutions) * 100 : 0, + })) + + const executionCounts = aggregateSegments.map((s) => ({ + timestamp: s.timestamp, + value: s.totalExecutions, + })) + + return { + errorRates, + durations: [], + executionCounts, + logs: globalLogs, + allLogs: globalLogs, + } + }, [aggregateSegments, globalLogs]) + + const workflowDetails = useMemo(() => { + if (!expandedWorkflowId || !workflowLogs.length) return {} + + return { + [expandedWorkflowId]: { + errorRates: [], + durations: [], + executionCounts: [], + logs: workflowLogs, + allLogs: workflowLogs, + }, + } + }, [expandedWorkflowId, workflowLogs]) + useEffect(() => { const urlView = searchParams.get('view') if (urlView === 'dashboard' || urlView === 'logs') { @@ -190,362 +295,24 @@ export default function Dashboard() { } }, [executions]) - const getStartTime = useCallback(() => { - const start = new Date(endTime) - - switch (timeFilter) { - case '30m': - start.setMinutes(endTime.getMinutes() - 30) - break - case '1h': - start.setHours(endTime.getHours() - 1) - break - case '6h': - start.setHours(endTime.getHours() - 6) - break - case '12h': - start.setHours(endTime.getHours() - 12) - break - case '24h': - start.setHours(endTime.getHours() - 24) - break - case '3d': - start.setDate(endTime.getDate() - 3) - break - case '7d': - start.setDate(endTime.getDate() - 7) - break - case '14d': - start.setDate(endTime.getDate() - 14) - break - case '30d': - start.setDate(endTime.getDate() - 30) - break - default: - start.setHours(endTime.getHours() - 24) - } - - return start - }, [endTime, timeFilter]) - - const fetchExecutions = useCallback( - async (isInitialLoad = false) => { - try { - if (isInitialLoad) { - setLoading(true) - } else { - setIsRefetching(true) - } - setError(null) - - const startTime = getStartTime() - const params = new URLSearchParams({ - segments: String(segmentCount || DEFAULT_SEGMENTS), - startTime: startTime.toISOString(), - endTime: endTime.toISOString(), - }) - - if (workflowIds.length > 0) { - params.set('workflowIds', workflowIds.join(',')) - } - - if (folderIds.length > 0) { - 
params.set('folderIds', folderIds.join(',')) - } - - if (triggers.length > 0) { - params.set('triggers', triggers.join(',')) - } - - const response = await fetch( - `/api/workspaces/${workspaceId}/metrics/executions?${params.toString()}` - ) - - if (!response.ok) { - throw new Error('Failed to fetch execution history') - } - - const data = await response.json() - const mapped: WorkflowExecution[] = (data.workflows || []).map((wf: any) => { - const segments = (wf.segments || []).map((s: any) => { - const total = s.totalExecutions || 0 - const success = s.successfulExecutions || 0 - const hasExecutions = total > 0 - const successRate = hasExecutions ? (success / total) * 100 : 100 - return { - timestamp: s.timestamp, - hasExecutions, - totalExecutions: total, - successfulExecutions: success, - successRate, - avgDurationMs: typeof s.avgDurationMs === 'number' ? s.avgDurationMs : 0, - p50Ms: typeof s.p50Ms === 'number' ? s.p50Ms : 0, - p90Ms: typeof s.p90Ms === 'number' ? s.p90Ms : 0, - p99Ms: typeof s.p99Ms === 'number' ? s.p99Ms : 0, - } - }) - const totals = segments.reduce( - (acc: { total: number; success: number }, seg: (typeof segments)[number]) => { - acc.total += seg.totalExecutions - acc.success += seg.successfulExecutions - return acc - }, - { total: 0, success: 0 } - ) - const overallSuccessRate = totals.total > 0 ? (totals.success / totals.total) * 100 : 100 - return { - workflowId: wf.workflowId, - workflowName: wf.workflowName, - segments, - overallSuccessRate, - } as WorkflowExecution - }) - const sortedWorkflows = mapped.sort((a, b) => { - const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0 - const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0 - return errB - errA - }) - setExecutions(sortedWorkflows) - - const segmentsCount: number = Number(params.get('segments') || DEFAULT_SEGMENTS) - const agg: { timestamp: string; totalExecutions: number; successfulExecutions: number }[] = - Array.from({ length: segmentsCount }, (_, i) => { - const base = startTime.getTime() - const ts = new Date(base + Math.floor((i * (endTime.getTime() - base)) / segmentsCount)) - return { - timestamp: ts.toISOString(), - totalExecutions: 0, - successfulExecutions: 0, - } - }) - for (const wf of data.workflows as any[]) { - wf.segments.forEach((s: any, i: number) => { - const index = Math.min(i, segmentsCount - 1) - agg[index].totalExecutions += s.totalExecutions || 0 - agg[index].successfulExecutions += s.successfulExecutions || 0 - }) - } - setAggregateSegments(agg) - - const errorRates = agg.map((s) => ({ - timestamp: s.timestamp, - value: s.totalExecutions > 0 ? 
(1 - s.successfulExecutions / s.totalExecutions) * 100 : 0, - })) - const executionCounts = agg.map((s) => ({ - timestamp: s.timestamp, - value: s.totalExecutions, - })) - - const logsParams = new URLSearchParams({ - limit: '50', - offset: '0', - workspaceId, - startDate: startTime.toISOString(), - endDate: endTime.toISOString(), - order: 'desc', - details: 'full', - }) - if (workflowIds.length > 0) logsParams.set('workflowIds', workflowIds.join(',')) - if (folderIds.length > 0) logsParams.set('folderIds', folderIds.join(',')) - if (triggers.length > 0) logsParams.set('triggers', triggers.join(',')) - - const logsResponse = await fetch(`/api/logs?${logsParams.toString()}`) - let mappedLogs: ExecutionLog[] = [] - if (logsResponse.ok) { - const logsData = await logsResponse.json() - mappedLogs = (logsData.data || []).map(mapToExecutionLog) - } - - setGlobalDetails({ - errorRates, - durations: [], - executionCounts, - logs: mappedLogs, - allLogs: mappedLogs, - }) - setGlobalLogsMeta({ offset: mappedLogs.length, hasMore: mappedLogs.length === 50 }) - } catch (err) { - logger.error('Error fetching executions:', err) - setError(err instanceof Error ? err.message : 'An error occurred') - } finally { - setLoading(false) - setIsRefetching(false) - } - }, - [workspaceId, timeFilter, endTime, getStartTime, workflowIds, folderIds, triggers, segmentCount] - ) - - const fetchWorkflowDetails = useCallback( - async (workflowId: string, silent = false) => { - try { - const startTime = getStartTime() - const params = new URLSearchParams({ - startTime: startTime.toISOString(), - endTime: endTime.toISOString(), - }) - - if (triggers.length > 0) { - params.set('triggers', triggers.join(',')) - } - - const response = await fetch( - `/api/logs?${new URLSearchParams({ - limit: '50', - offset: '0', - workspaceId, - startDate: startTime.toISOString(), - endDate: endTime.toISOString(), - order: 'desc', - details: 'full', - workflowIds: workflowId, - ...(triggers.length > 0 ? 
{ triggers: triggers.join(',') } : {}), - }).toString()}` - ) - - if (!response.ok) { - throw new Error('Failed to fetch workflow details') - } - - const data = await response.json() - const mappedLogs: ExecutionLog[] = (data.data || []).map(mapToExecutionLogAlt) - - setWorkflowDetails((prev) => ({ - ...prev, - [workflowId]: { - errorRates: [], - durations: [], - executionCounts: [], - logs: mappedLogs, - allLogs: mappedLogs, - __meta: { offset: mappedLogs.length, hasMore: (data.data || []).length === 50 }, - }, - })) - } catch (err) { - logger.error('Error fetching workflow details:', err) - } - }, - [workspaceId, endTime, getStartTime, triggers] - ) - - // Infinite scroll for details logs const loadMoreLogs = useCallback( - async (workflowId: string) => { - const details = (workflowDetails as any)[workflowId] - if (!details) return - if (details.__loading) return - if (!details.__meta?.hasMore) return - try { - // mark loading to prevent duplicate fetches - setWorkflowDetails((prev) => ({ - ...prev, - [workflowId]: { ...(prev as any)[workflowId], __loading: true }, - })) - const startTime = getStartTime() - const offset = details.__meta.offset || 0 - const qp = new URLSearchParams({ - limit: '50', - offset: String(offset), - workspaceId, - startDate: startTime.toISOString(), - endDate: endTime.toISOString(), - order: 'desc', - details: 'full', - workflowIds: workflowId, - }) - if (triggers.length > 0) qp.set('triggers', triggers.join(',')) - const res = await fetch(`/api/logs?${qp.toString()}`) - if (!res.ok) return - const data = await res.json() - const more: ExecutionLog[] = (data.data || []).map(mapToExecutionLogAlt) - - setWorkflowDetails((prev) => { - const cur = prev[workflowId] - const seen = new Set() - const dedup = [...(cur?.allLogs || []), ...more].filter((x) => { - const id = x.id - if (seen.has(id)) return false - seen.add(id) - return true - }) - return { - ...prev, - [workflowId]: { - ...cur, - logs: dedup, - allLogs: dedup, - __meta: { - offset: (cur?.__meta?.offset || 0) + more.length, - hasMore: more.length === 50, - }, - __loading: false, - }, - } - }) - } catch { - setWorkflowDetails((prev) => ({ - ...prev, - [workflowId]: { ...(prev as any)[workflowId], __loading: false }, - })) + (workflowId: string) => { + if ( + workflowId === expandedWorkflowId && + workflowLogsQuery.hasNextPage && + !workflowLogsQuery.isFetchingNextPage + ) { + workflowLogsQuery.fetchNextPage() } }, - [workspaceId, endTime, getStartTime, triggers, workflowDetails] + [expandedWorkflowId, workflowLogsQuery] ) - const loadMoreGlobalLogs = useCallback(async () => { - if (!globalDetails || !globalLogsMeta.hasMore) return - if (globalLoadingMore) return - try { - setGlobalLoadingMore(true) - const startTime = getStartTime() - const qp = new URLSearchParams({ - limit: '50', - offset: String(globalLogsMeta.offset || 0), - workspaceId, - startDate: startTime.toISOString(), - endDate: endTime.toISOString(), - order: 'desc', - details: 'full', - }) - if (workflowIds.length > 0) qp.set('workflowIds', workflowIds.join(',')) - if (folderIds.length > 0) qp.set('folderIds', folderIds.join(',')) - if (triggers.length > 0) qp.set('triggers', triggers.join(',')) - - const res = await fetch(`/api/logs?${qp.toString()}`) - if (!res.ok) return - const data = await res.json() - const more: ExecutionLog[] = (data.data || []).map(mapToExecutionLog) - - setGlobalDetails((prev) => { - if (!prev) return prev - const seen = new Set() - const dedup = [...prev.allLogs, ...more].filter((x) => { - const id = x.id - if 
(seen.has(id)) return false - seen.add(id) - return true - }) - return { ...prev, logs: dedup, allLogs: dedup } - }) - setGlobalLogsMeta((m) => ({ - offset: (m.offset || 0) + more.length, - hasMore: more.length === 50, - })) - } catch { - // ignore - } finally { - setGlobalLoadingMore(false) + const loadMoreGlobalLogs = useCallback(() => { + if (globalLogsQuery.hasNextPage && !globalLogsQuery.isFetchingNextPage) { + globalLogsQuery.fetchNextPage() } - }, [ - globalDetails, - globalLogsMeta, - globalLoadingMore, - workspaceId, - endTime, - getStartTime, - workflowIds, - folderIds, - triggers, - ]) + }, [globalLogsQuery]) const toggleWorkflow = useCallback( (workflowId: string) => { @@ -553,12 +320,9 @@ export default function Dashboard() { setExpandedWorkflowId(null) } else { setExpandedWorkflowId(workflowId) - if (!workflowDetails[workflowId]) { - fetchWorkflowDetails(workflowId) - } } }, - [expandedWorkflowId, workflowDetails, fetchWorkflowDetails] + [expandedWorkflowId] ) const handleSegmentClick = useCallback( @@ -568,13 +332,7 @@ export default function Dashboard() { _timestamp: string, mode: 'single' | 'toggle' | 'range' ) => { - // Fetch workflow details if not already loaded - if (!workflowDetails[workflowId]) { - fetchWorkflowDetails(workflowId) - } - if (mode === 'toggle') { - // Toggle mode: Add/remove segment from selection, allowing cross-workflow selection setSelectedSegments((prev) => { const currentSegments = prev[workflowId] || [] const exists = currentSegments.includes(segmentIndex) @@ -584,7 +342,6 @@ export default function Dashboard() { if (nextSegments.length === 0) { const { [workflowId]: _, ...rest } = prev - // If this was the only workflow with selections, clear expanded if (Object.keys(rest).length === 0) { setExpandedWorkflowId(null) } @@ -593,7 +350,6 @@ export default function Dashboard() { const newState = { ...prev, [workflowId]: nextSegments } - // Set to multi-workflow mode if multiple workflows have selections const selectedWorkflowIds = Object.keys(newState) if (selectedWorkflowIds.length > 1) { setExpandedWorkflowId('__multi__') @@ -606,27 +362,23 @@ export default function Dashboard() { setLastAnchorIndices((prev) => ({ ...prev, [workflowId]: segmentIndex })) } else if (mode === 'single') { - // Single mode: Select this segment, or deselect if already selected setSelectedSegments((prev) => { const currentSegments = prev[workflowId] || [] const isOnlySelectedSegment = currentSegments.length === 1 && currentSegments[0] === segmentIndex const isOnlyWorkflowSelected = Object.keys(prev).length === 1 && prev[workflowId] - // If this is the only selected segment in the only selected workflow, deselect it if (isOnlySelectedSegment && isOnlyWorkflowSelected) { setExpandedWorkflowId(null) setLastAnchorIndices({}) return {} } - // Otherwise, select only this segment setExpandedWorkflowId(workflowId) setLastAnchorIndices({ [workflowId]: segmentIndex }) return { [workflowId]: [segmentIndex] } }) } else if (mode === 'range') { - // Range mode: Expand selection within the current workflow if (expandedWorkflowId === workflowId) { setSelectedSegments((prev) => { const currentSegments = prev[workflowId] || [] @@ -638,31 +390,15 @@ export default function Dashboard() { return { ...prev, [workflowId]: Array.from(union).sort((a, b) => a - b) } }) } else { - // If clicking range on a different workflow, treat as single click setExpandedWorkflowId(workflowId) setSelectedSegments({ [workflowId]: [segmentIndex] }) setLastAnchorIndices({ [workflowId]: segmentIndex }) } } }, - 
[expandedWorkflowId, workflowDetails, fetchWorkflowDetails, lastAnchorIndices] + [expandedWorkflowId, workflowDetails, lastAnchorIndices] ) - const isInitialMount = useRef(true) - useEffect(() => { - const isInitial = isInitialMount.current - if (isInitial) { - isInitialMount.current = false - } - fetchExecutions(isInitial) - }, [workspaceId, timeFilter, endTime, workflowIds, folderIds, triggers, segmentCount]) - - useEffect(() => { - if (expandedWorkflowId) { - fetchWorkflowDetails(expandedWorkflowId) - } - }, [expandedWorkflowId, timeFilter, endTime, workflowIds, folderIds, fetchWorkflowDetails]) - useEffect(() => { setSelectedSegments({}) setLastAnchorIndices({}) @@ -692,68 +428,15 @@ export default function Dashboard() { } }, []) - const getShiftLabel = () => { - switch (sidebarTimeRange) { - case 'Past 30 minutes': - return '30 minutes' - case 'Past hour': - return 'hour' - case 'Past 12 hours': - return '12 hours' - case 'Past 24 hours': - return '24 hours' - default: - return 'period' - } - } - const getDateRange = () => { const start = getStartTime() return `${start.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit' })} - ${endTime.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit', year: 'numeric' })}` } - const shiftTimeWindow = (direction: 'back' | 'forward') => { - let shift: number - switch (timeFilter) { - case '30m': - shift = 30 * 60 * 1000 - break - case '1h': - shift = 60 * 60 * 1000 - break - case '6h': - shift = 6 * 60 * 60 * 1000 - break - case '12h': - shift = 12 * 60 * 60 * 1000 - break - case '24h': - shift = 24 * 60 * 60 * 1000 - break - case '3d': - shift = 3 * 24 * 60 * 60 * 1000 - break - case '7d': - shift = 7 * 24 * 60 * 60 * 1000 - break - case '14d': - shift = 14 * 24 * 60 * 60 * 1000 - break - case '30d': - shift = 30 * 24 * 60 * 60 * 1000 - break - default: - shift = 24 * 60 * 60 * 1000 - } - - setEndTime((prev) => new Date(prev.getTime() + (direction === 'forward' ? shift : -shift))) - } - const resetToNow = () => { setEndTime(new Date()) } - const isLive = endTime.getTime() > Date.now() - 60000 // Within last minute const [live, setLive] = useState(false) useEffect(() => { @@ -768,8 +451,6 @@ export default function Dashboard() { } }, [live]) - // Infinite scroll is now handled inside WorkflowDetails - return (
@@ -873,25 +554,21 @@ export default function Dashboard() { {/* Details section in its own scroll area */}
{(() => { - // Handle multi-workflow selection view if (expandedWorkflowId === '__multi__') { const selectedWorkflowIds = Object.keys(selectedSegments) const totalMs = endTime.getTime() - getStartTime().getTime() const segMs = totalMs / Math.max(1, segmentCount) - // Collect all unique segment indices across all workflows const allSegmentIndices = new Set() for (const indices of Object.values(selectedSegments)) { indices.forEach((idx) => allSegmentIndices.add(idx)) } const sortedIndices = Array.from(allSegmentIndices).sort((a, b) => a - b) - // Aggregate logs from all selected workflows/segments const allLogs: any[] = [] let totalExecutions = 0 let totalSuccess = 0 - // Build aggregated chart series const aggregatedSegments: Array<{ timestamp: string totalExecutions: number @@ -900,9 +577,7 @@ export default function Dashboard() { durationCount: number }> = [] - // Initialize aggregated segments for each unique index for (const idx of sortedIndices) { - // Get the timestamp from the first workflow that has this index let timestamp = '' for (const wfId of selectedWorkflowIds) { const wf = executions.find((w) => w.workflowId === wfId) @@ -921,7 +596,6 @@ export default function Dashboard() { }) } - // Aggregate data from all workflows for (const wfId of selectedWorkflowIds) { const wf = executions.find((w) => w.workflowId === wfId) const details = workflowDetails[wfId] @@ -929,7 +603,6 @@ export default function Dashboard() { if (!wf || !details || indices.length === 0) continue - // Calculate time windows for this workflow's selected segments const windows = indices .map((idx) => wf.segments[idx]) .filter(Boolean) @@ -944,7 +617,6 @@ export default function Dashboard() { const inAnyWindow = (t: number) => windows.some((w) => t >= w.start && t < w.end) - // Filter logs for this workflow's selected segments const workflowLogs = details.allLogs .filter((log) => inAnyWindow(new Date(log.startedAt).getTime())) .map((log) => ({ @@ -956,7 +628,6 @@ export default function Dashboard() { allLogs.push(...workflowLogs) - // Aggregate segment metrics indices.forEach((idx) => { const segment = wf.segments[idx] if (!segment) return @@ -974,7 +645,6 @@ export default function Dashboard() { }) } - // Build chart series const errorRates = aggregatedSegments.map((seg) => ({ timestamp: seg.timestamp, value: @@ -993,7 +663,6 @@ export default function Dashboard() { value: seg.durationCount > 0 ? seg.avgDurationMs / seg.durationCount : 0, })) - // Sort logs by time (most recent first) allLogs.sort( (a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime() ) @@ -1002,13 +671,11 @@ export default function Dashboard() { const totalRate = totalExecutions > 0 ? 
(totalSuccess / totalExecutions) * 100 : 100 - // Calculate overall time range across all selected workflows let multiWorkflowTimeRange: { start: Date; end: Date } | null = null if (sortedIndices.length > 0) { const firstIdx = sortedIndices[0] const lastIdx = sortedIndices[sortedIndices.length - 1] - // Find earliest start time let earliestStart: Date | null = null for (const wfId of selectedWorkflowIds) { const wf = executions.find((w) => w.workflowId === wfId) @@ -1021,7 +688,6 @@ export default function Dashboard() { } } - // Find latest end time let latestEnd: Date | null = null for (const wfId of selectedWorkflowIds) { const wf = executions.find((w) => w.workflowId === wfId) @@ -1042,7 +708,6 @@ export default function Dashboard() { } } - // Get workflow names const workflowNames = selectedWorkflowIds .map((id) => executions.find((w) => w.workflowId === id)?.workflowName) .filter(Boolean) as string[] @@ -1179,33 +844,25 @@ export default function Dashboard() { ...log, workflowName: (log as any).workflowName || wf.workflowName, })) - - // Build series from selected segments indices - const idxSet = new Set(workflowSelectedIndices) - const selectedSegs = wf.segments.filter((_, i) => idxSet.has(i)) - ;(details as any).__filtered = buildSeriesFromSegments(selectedSegs as any) - } else if (details) { - // Clear filtered data when no segments are selected - ;(details as any).__filtered = undefined } + // Compute series data based on selected segments or all segments + const segmentsToUse = + workflowSelectedIndices.length > 0 + ? wf.segments.filter((_, i) => workflowSelectedIndices.includes(i)) + : wf.segments + const series = buildSeriesFromSegments(segmentsToUse as any) + const detailsWithFilteredLogs = details ? { ...details, logs: logsToDisplay, - ...(() => { - const series = - (details as any).__filtered || - buildSeriesFromSegments(wf.segments as any) - return { - errorRates: series.errorRates, - durations: series.durations, - executionCounts: series.executionCounts, - durationP50: series.durationP50, - durationP90: series.durationP90, - durationP99: series.durationP99, - } - })(), + errorRates: series.errorRates, + durations: series.durations, + executionCounts: series.executionCounts, + durationP50: series.durationP50, + durationP90: series.durationP90, + durationP99: series.durationP99, } : undefined @@ -1261,8 +918,8 @@ export default function Dashboard() { }} formatCost={formatCost} onLoadMore={() => loadMoreLogs(expandedWorkflowId)} - hasMore={(workflowDetails as any)[expandedWorkflowId]?.__meta?.hasMore} - isLoadingMore={(workflowDetails as any)[expandedWorkflowId]?.__loading} + hasMore={workflowLogsQuery.hasNextPage ?? false} + isLoadingMore={workflowLogsQuery.isFetchingNextPage} /> ) } @@ -1297,8 +954,8 @@ export default function Dashboard() { }} formatCost={formatCost} onLoadMore={loadMoreGlobalLogs} - hasMore={globalLogsMeta.hasMore} - isLoadingMore={globalLoadingMore} + hasMore={globalLogsQuery.hasNextPage ?? 
false}
+                isLoadingMore={globalLogsQuery.isFetchingNextPage}
              />
            )
          })()}
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx
index add261d653..901f3dc9c4 100644
--- a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx
@@ -1,10 +1,9 @@
 'use client'
 
-import { useCallback, useEffect, useRef, useState } from 'react'
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import { AlertCircle, ArrowUpRight, Info, Loader2 } from 'lucide-react'
 import Link from 'next/link'
 import { useParams } from 'next/navigation'
-import { createLogger } from '@/lib/logs/console/logger'
 import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
 import { cn } from '@/lib/utils'
 import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'
@@ -13,12 +12,12 @@ import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/s
 import Dashboard from '@/app/workspace/[workspaceId]/logs/dashboard'
 import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils'
 import { useFolders } from '@/hooks/queries/folders'
+import { useLogDetail, useLogsList } from '@/hooks/queries/logs'
 import { useDebounce } from '@/hooks/use-debounce'
 import { useFolderStore } from '@/stores/folders/store'
 import { useFilterStore } from '@/stores/logs/filters/store'
-import type { LogsResponse, WorkflowLog } from '@/stores/logs/filters/types'
+import type { WorkflowLog } from '@/stores/logs/filters/types'
 
-const logger = createLogger('Logs')
 const LOGS_PER_PAGE = 50
 
 /**
@@ -63,19 +62,7 @@ export default function Logs() {
   const workspaceId = params.workspaceId as string
 
   const {
-    logs,
-    loading,
-    error,
-    setLogs,
-    setLoading,
-    setError,
     setWorkspaceId,
-    page,
-    setPage,
-    hasMore,
-    setHasMore,
-    isFetchingMore,
-    setIsFetchingMore,
     initializeFromURL,
     timeRange,
     level,
@@ -95,10 +82,6 @@ export default function Logs() {
   const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
   const [selectedLogIndex, setSelectedLogIndex] = useState(-1)
   const [isSidebarOpen, setIsSidebarOpen] = useState(false)
-  const [isDetailsLoading, setIsDetailsLoading] = useState(false)
-  const detailsCacheRef = useRef<Map<string, any>>(new Map())
-  const detailsAbortRef = useRef<AbortController | null>(null)
-  const currentDetailsIdRef = useRef<string | null>(null)
   const selectedRowRef = useRef(null)
   const loaderRef = useRef(null)
   const scrollContainerRef = useRef(null)
@@ -107,16 +90,37 @@ export default function Logs() {
   const [searchQuery, setSearchQuery] = useState(storeSearchQuery)
   const debouncedSearchQuery = useDebounce(searchQuery, 300)
 
-  const [availableWorkflows, setAvailableWorkflows] = useState([])
-  const [availableFolders, setAvailableFolders] = useState([])
+  const [, setAvailableWorkflows] = useState([])
+  const [, setAvailableFolders] = useState([])
 
-  // Live and refresh state
   const [isLive, setIsLive] = useState(false)
-  const [isRefreshing, setIsRefreshing] = useState(false)
-  const liveIntervalRef = useRef<NodeJS.Timeout | null>(null)
   const isSearchOpenRef = useRef(false)
 
-  // Sync local search query with store search query
+  const logFilters = useMemo(
+    () => ({
+      timeRange,
+      level,
+      workflowIds,
+      folderIds,
+      triggers,
+      searchQuery: debouncedSearchQuery,
+      limit: LOGS_PER_PAGE,
+    }),
+    [timeRange, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
+  )
+
+  const logsQuery = useLogsList(workspaceId, logFilters, {
+    enabled: Boolean(workspaceId) && isInitialized.current,
+    refetchInterval: isLive ?
5000 : false, + }) + + const logDetailQuery = useLogDetail(selectedLog?.id) + + const logs = useMemo(() => { + if (!logsQuery.data?.pages) return [] + return logsQuery.data.pages.flatMap((page) => page.logs) + }, [logsQuery.data?.pages]) + useEffect(() => { setSearchQuery(storeSearchQuery) }, [storeSearchQuery]) @@ -182,62 +186,6 @@ export default function Logs() { const index = logs.findIndex((l) => l.id === log.id) setSelectedLogIndex(index) setIsSidebarOpen(true) - setIsDetailsLoading(true) - - const currentId = log.id - const prevId = index > 0 ? logs[index - 1]?.id : undefined - const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined - - if (detailsAbortRef.current) { - try { - detailsAbortRef.current.abort() - } catch { - /* no-op */ - } - } - const controller = new AbortController() - detailsAbortRef.current = controller - currentDetailsIdRef.current = currentId - - const idsToFetch: Array<{ id: string; merge: boolean }> = [] - const cachedCurrent = currentId ? detailsCacheRef.current.get(currentId) : undefined - if (currentId && !cachedCurrent) idsToFetch.push({ id: currentId, merge: true }) - if (prevId && !detailsCacheRef.current.has(prevId)) - idsToFetch.push({ id: prevId, merge: false }) - if (nextId && !detailsCacheRef.current.has(nextId)) - idsToFetch.push({ id: nextId, merge: false }) - - if (cachedCurrent) { - setSelectedLog((prev) => - prev && prev.id === currentId - ? ({ ...(prev as any), ...(cachedCurrent as any) } as any) - : prev - ) - setIsDetailsLoading(false) - } - if (idsToFetch.length === 0) return - - Promise.all( - idsToFetch.map(async ({ id, merge }) => { - try { - const res = await fetch(`/api/logs/${id}`, { signal: controller.signal }) - if (!res.ok) return - const body = await res.json() - const detailed = body?.data - if (detailed) { - detailsCacheRef.current.set(id, detailed) - if (merge && id === currentId) { - setSelectedLog((prev) => - prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev - ) - if (currentDetailsIdRef.current === id) setIsDetailsLoading(false) - } - } - } catch (e: any) { - if (e?.name === 'AbortError') return - } - }) - ).catch(() => {}) } const handleNavigateNext = useCallback(() => { @@ -246,54 +194,6 @@ export default function Logs() { setSelectedLogIndex(nextIndex) const nextLog = logs[nextIndex] setSelectedLog(nextLog) - if (detailsAbortRef.current) { - try { - detailsAbortRef.current.abort() - } catch { - /* no-op */ - } - } - const controller = new AbortController() - detailsAbortRef.current = controller - - const cached = detailsCacheRef.current.get(nextLog.id) - if (cached) { - setSelectedLog((prev) => - prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev - ) - } else { - const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined - const afterId = nextIndex < logs.length - 1 ? 
logs[nextIndex + 1]?.id : undefined - const idsToFetch: Array<{ id: string; merge: boolean }> = [] - if (nextLog.id && !detailsCacheRef.current.has(nextLog.id)) - idsToFetch.push({ id: nextLog.id, merge: true }) - if (prevId && !detailsCacheRef.current.has(prevId)) - idsToFetch.push({ id: prevId, merge: false }) - if (afterId && !detailsCacheRef.current.has(afterId)) - idsToFetch.push({ id: afterId, merge: false }) - Promise.all( - idsToFetch.map(async ({ id, merge }) => { - try { - const res = await fetch(`/api/logs/${id}`, { signal: controller.signal }) - if (!res.ok) return - const body = await res.json() - const detailed = body?.data - if (detailed) { - detailsCacheRef.current.set(id, detailed) - if (merge && id === nextLog.id) { - setSelectedLog((prev) => - prev && prev.id === id - ? ({ ...(prev as any), ...(detailed as any) } as any) - : prev - ) - } - } - } catch (e: any) { - if (e?.name === 'AbortError') return - } - }) - ).catch(() => {}) - } } }, [selectedLogIndex, logs]) @@ -303,54 +203,6 @@ export default function Logs() { setSelectedLogIndex(prevIndex) const prevLog = logs[prevIndex] setSelectedLog(prevLog) - if (detailsAbortRef.current) { - try { - detailsAbortRef.current.abort() - } catch { - /* no-op */ - } - } - const controller = new AbortController() - detailsAbortRef.current = controller - - const cached = detailsCacheRef.current.get(prevLog.id) - if (cached) { - setSelectedLog((prev) => - prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev - ) - } else { - const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined - const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined - const idsToFetch: Array<{ id: string; merge: boolean }> = [] - if (prevLog.id && !detailsCacheRef.current.has(prevLog.id)) - idsToFetch.push({ id: prevLog.id, merge: true }) - if (beforeId && !detailsCacheRef.current.has(beforeId)) - idsToFetch.push({ id: beforeId, merge: false }) - if (afterId && !detailsCacheRef.current.has(afterId)) - idsToFetch.push({ id: afterId, merge: false }) - Promise.all( - idsToFetch.map(async ({ id, merge }) => { - try { - const res = await fetch(`/api/logs/${id}`, { signal: controller.signal }) - if (!res.ok) return - const body = await res.json() - const detailed = body?.data - if (detailed) { - detailsCacheRef.current.set(id, detailed) - if (merge && id === prevLog.id) { - setSelectedLog((prev) => - prev && prev.id === id - ? 
({ ...(prev as any), ...(detailed as any) } as any) - : prev - ) - } - } - } catch (e: any) { - if (e?.name === 'AbortError') return - } - }) - ).catch(() => {}) - } } }, [selectedLogIndex, logs]) @@ -369,104 +221,11 @@ export default function Logs() { } }, [selectedLogIndex]) - const fetchLogs = useCallback(async (pageNum: number, append = false) => { - try { - // Don't fetch if workspaceId is not set - const { workspaceId: storeWorkspaceId } = useFilterStore.getState() - if (!storeWorkspaceId) { - return - } - - if (pageNum === 1) { - setLoading(true) - } else { - setIsFetchingMore(true) - } - - const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState() - const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE) - - const { searchQuery: currentSearchQuery } = useFilterStore.getState() - const parsedQuery = parseQuery(currentSearchQuery) - const enhancedParams = queryToApiParams(parsedQuery) - - const allParams = new URLSearchParams(queryParams) - Object.entries(enhancedParams).forEach(([key, value]) => { - if (key === 'triggers' && allParams.has('triggers')) { - const existingTriggers = allParams.get('triggers')?.split(',') || [] - const searchTriggers = value.split(',') - const combined = [...new Set([...existingTriggers, ...searchTriggers])] - allParams.set('triggers', combined.join(',')) - } else { - allParams.set(key, value) - } - }) - - allParams.set('details', 'basic') - const response = await fetch(`/api/logs?${allParams.toString()}`) - - if (!response.ok) { - throw new Error(`Error fetching logs: ${response.statusText}`) - } - - const data: LogsResponse = await response.json() - - setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages) - - setLogs(data.data, append) - - setError(null) - } catch (err) { - logger.error('Failed to fetch logs:', { err }) - setError(err instanceof Error ? err.message : 'An unknown error occurred') - } finally { - if (pageNum === 1) { - setLoading(false) - } else { - setIsFetchingMore(false) - } - } - }, []) - const handleRefresh = async () => { - if (isRefreshing) return - - setIsRefreshing(true) - - try { - await fetchLogs(1) - setError(null) - } catch (err) { - setError(err instanceof Error ? 
err.message : 'An unknown error occurred') - } finally { - setIsRefreshing(false) - } - } - - // Setup or clear the live refresh interval when isLive changes - useEffect(() => { - if (liveIntervalRef.current) { - clearInterval(liveIntervalRef.current) - liveIntervalRef.current = null - } - - if (isLive) { - handleRefresh() - liveIntervalRef.current = setInterval(() => { - handleRefresh() - }, 5000) - } - - return () => { - if (liveIntervalRef.current) { - clearInterval(liveIntervalRef.current) - liveIntervalRef.current = null - } + await logsQuery.refetch() + if (selectedLog?.id) { + await logDetailQuery.refetch() } - }, [isLive]) - - const toggleLive = () => { - setIsLive(!isLive) } const handleExport = async () => { @@ -506,101 +265,14 @@ export default function Logs() { return () => window.removeEventListener('popstate', handlePopState) }, [initializeFromURL]) - useEffect(() => { - if (!isInitialized.current) { - return - } - - // Don't fetch if workspaceId is not set yet - if (!workspaceId) { - return - } - - setPage(1) - setHasMore(true) - - const fetchWithFilters = async () => { - try { - setLoading(true) - - const params = new URLSearchParams() - params.set('details', 'basic') - params.set('limit', LOGS_PER_PAGE.toString()) - params.set('offset', '0') // Always start from page 1 - params.set('workspaceId', workspaceId) - - const parsedQuery = parseQuery(debouncedSearchQuery) - const enhancedParams = queryToApiParams(parsedQuery) - - if (level !== 'all') params.set('level', level) - if (triggers.length > 0) params.set('triggers', triggers.join(',')) - if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(',')) - if (folderIds.length > 0) params.set('folderIds', folderIds.join(',')) - - Object.entries(enhancedParams).forEach(([key, value]) => { - if (key === 'triggers' && params.has('triggers')) { - const storeTriggers = params.get('triggers')?.split(',') || [] - const searchTriggers = value.split(',') - const combined = [...new Set([...storeTriggers, ...searchTriggers])] - params.set('triggers', combined.join(',')) - } else { - params.set(key, value) - } - }) - - if (timeRange !== 'All time') { - const now = new Date() - let startDate: Date - switch (timeRange) { - case 'Past 30 minutes': - startDate = new Date(now.getTime() - 30 * 60 * 1000) - break - case 'Past hour': - startDate = new Date(now.getTime() - 60 * 60 * 1000) - break - case 'Past 24 hours': - startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000) - break - default: - startDate = new Date(0) - } - params.set('startDate', startDate.toISOString()) - } - - const response = await fetch(`/api/logs?${params.toString()}`) - - if (!response.ok) { - throw new Error(`Error fetching logs: ${response.statusText}`) - } - - const data: LogsResponse = await response.json() - setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages) - setLogs(data.data, false) - setError(null) - } catch (err) { - logger.error('Failed to fetch logs:', { err }) - setError(err instanceof Error ? 
err.message : 'An unknown error occurred') - } finally { - setLoading(false) - } - } - - fetchWithFilters() - }, [workspaceId, timeRange, level, workflowIds, folderIds, debouncedSearchQuery, triggers]) - const loadMoreLogs = useCallback(() => { - if (!isFetchingMore && hasMore) { - const nextPage = page + 1 - setPage(nextPage) - setIsFetchingMore(true) - setTimeout(() => { - fetchLogs(nextPage, true) - }, 50) + if (!logsQuery.isFetching && logsQuery.hasNextPage) { + logsQuery.fetchNextPage() } - }, [fetchLogs, isFetchingMore, hasMore, page]) + }, [logsQuery]) useEffect(() => { - if (loading || !hasMore) return + if (logsQuery.isLoading || !logsQuery.hasNextPage) return const scrollContainer = scrollContainerRef.current if (!scrollContainer) return @@ -612,7 +284,7 @@ export default function Logs() { const scrollPercentage = (scrollTop / (scrollHeight - clientHeight)) * 100 - if (scrollPercentage > 60 && !isFetchingMore && hasMore) { + if (scrollPercentage > 60 && !logsQuery.isFetchingNextPage && logsQuery.hasNextPage) { loadMoreLogs() } } @@ -622,13 +294,14 @@ export default function Logs() { return () => { scrollContainer.removeEventListener('scroll', handleScroll) } - }, [loading, hasMore, isFetchingMore, loadMoreLogs]) + }, [logsQuery.isLoading, logsQuery.hasNextPage, logsQuery.isFetchingNextPage, loadMoreLogs]) useEffect(() => { const currentLoaderRef = loaderRef.current const scrollContainer = scrollContainerRef.current - if (!currentLoaderRef || !scrollContainer || loading || !hasMore) return + if (!currentLoaderRef || !scrollContainer || logsQuery.isLoading || !logsQuery.hasNextPage) + return const observer = new IntersectionObserver( (entries) => { @@ -636,7 +309,7 @@ export default function Logs() { if (!e?.isIntersecting) return const { scrollTop, scrollHeight, clientHeight } = scrollContainer const pct = (scrollTop / (scrollHeight - clientHeight)) * 100 - if (pct > 70 && !isFetchingMore) { + if (pct > 70 && !logsQuery.isFetchingNextPage) { loadMoreLogs() } }, @@ -652,7 +325,7 @@ export default function Logs() { return () => { observer.unobserve(currentLoaderRef) } - }, [loading, hasMore, isFetchingMore, loadMoreLogs]) + }, [logsQuery.isLoading, logsQuery.hasNextPage, logsQuery.isFetchingNextPage, loadMoreLogs]) useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { @@ -686,7 +359,6 @@ export default function Logs() { return () => window.removeEventListener('keydown', handleKeyDown) }, [logs, selectedLogIndex, isSidebarOpen, selectedLog, handleNavigateNext, handleNavigatePrev]) - // If in dashboard mode, show the dashboard if (viewMode === 'dashboard') { return } @@ -701,7 +373,7 @@ export default function Logs() {
setIsLive(fn)} @@ -750,18 +422,20 @@ export default function Logs() { {/* Table body - scrollable */}
- {loading && page === 1 ? ( + {logsQuery.isLoading && !logsQuery.data ? (
Loading logs...
- ) : error ? ( + ) : logsQuery.isError ? (
- Error: {error} + + Error: {logsQuery.error?.message || 'Failed to load logs'} +
) : logs.length === 0 ? ( @@ -778,7 +452,6 @@ export default function Logs() { const isSelected = selectedLog?.id === log.id const baseLevel = (log.level || 'info').toLowerCase() const isError = baseLevel === 'error' - // If it's an error, don't treat it as pending even if hasPendingPause is true const isPending = !isError && log.hasPendingPause === true const statusLabel = isPending ? 'Pending' @@ -906,13 +579,13 @@ export default function Logs() { })} {/* Infinite scroll loader */} - {hasMore && ( + {logsQuery.hasNextPage && (
- {isFetchingMore ? ( + {logsQuery.isFetchingNextPage ? ( <> Loading more... @@ -932,8 +605,9 @@ export default function Logs() { {/* Log Sidebar */} (null) + const fileInputRef = useRef(null) const { activeTab, setActiveTab, panelWidth, _hasHydrated, setHasHydrated } = usePanelStore() const copilotRef = useRef<{ createNewChat: () => void @@ -77,6 +79,7 @@ export function Panel() { // Hooks const userPermissions = useUserPermissionsContext() + const { isImporting, handleFileChange } = useImportWorkflow({ workspaceId }) const { workflows, activeWorkflowId, @@ -262,6 +265,14 @@ export function Panel() { workspaceId, ]) + /** + * Handles triggering file input for workflow import + */ + const handleImportWorkflow = useCallback(() => { + setIsMenuOpen(false) + fileInputRef.current?.click() + }, []) + // Compute run button state const canRun = userPermissions.canRead // Running only requires read permissions const isLoadingPermissions = userPermissions.isLoading @@ -314,7 +325,7 @@ export function Panel() { { setVariablesOpen(!isVariablesOpen)}> - + Variables } @@ -331,7 +342,14 @@ export function Panel() { disabled={isExporting || !currentWorkflow} > - Export JSON + Export workflow + + + + Import workflow + + {/* Hidden file input for workflow import */} + ) } diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx index f47e7732dd..71eb76a46c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx @@ -116,8 +116,7 @@ const WorkflowContent = React.memo(() => { // Get workspace ID from the params const workspaceId = params.workspaceId as string - const { workflows, activeWorkflowId, isLoading, setActiveWorkflow, createWorkflow } = - useWorkflowRegistry() + const { workflows, activeWorkflowId, isLoading, setActiveWorkflow } = useWorkflowRegistry() // Use the clean abstraction for current workflow state const currentWorkflow = useCurrentWorkflow() diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/cancel-subscription/cancel-subscription.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/cancel-subscription/cancel-subscription.tsx index c11b6b5d68..38696c88f6 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/cancel-subscription/cancel-subscription.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/cancel-subscription/cancel-subscription.tsx @@ -227,12 +227,8 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub onClick={() => setIsDialogOpen(true)} disabled={isLoading} className={cn( - 'h-8 rounded-[8px] font-medium text-xs transition-all duration-200', - error - ? 'border-red-500 text-red-500 dark:border-red-500 dark:text-red-500' - : isCancelAtPeriodEnd - ? 'text-muted-foreground hover:border-green-500 hover:bg-green-500 hover:text-white dark:hover:border-green-500 dark:hover:bg-green-500' - : 'text-muted-foreground hover:border-red-500 hover:bg-red-500 hover:text-white dark:hover:border-red-500 dark:hover:bg-red-500' + 'h-8 rounded-[8px] font-medium text-xs', + error && 'border-red-500 text-red-500 dark:border-red-500 dark:text-red-500' )} > {error ? 
'Error' : isCancelAtPeriodEnd ? 'Restore' : 'Manage'} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/plan-card/plan-card.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/plan-card/plan-card.tsx index a8599da299..46009c821a 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/plan-card/plan-card.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/components/plan-card/plan-card.tsx @@ -107,12 +107,11 @@ export function PlanCard({