From de386b36bee3d179fb9dd2b126f176bed8a41b6b Mon Sep 17 00:00:00 2001
From: ethan
Date: Tue, 18 Nov 2025 13:58:26 +1100
Subject: [PATCH 01/13] refactor: use message queue for compact continue messages

Previously, compact continue messages were handled by a frontend hook
(useAutoCompactContinue) that watched for completed compactions and then sent
the continue message. This introduced complexity and race conditions, and it
required tracking processed message IDs.

Now leverages the existing message queue system:
- Backend queues continue message when compaction starts
- Queue auto-sends when compaction stream ends (existing behavior)
- Continue message shown in queue UI during compaction
- Proper cleanup on all error paths
- Strip editMessageId to prevent truncation failures after compaction

Net reduction of 134 lines. Simpler, more reliable, better UX.
---
 src/browser/App.tsx                         |   4 -
 src/browser/api.ts                          |   2 +-
 src/browser/hooks/useAutoCompactContinue.ts | 115 --------------------
 src/browser/stores/WorkspaceStore.ts        |  11 +-
 src/common/constants/ipc-constants.ts       |   2 +-
 src/common/constants/storage.ts             |  11 --
 src/common/types/message.ts                 |   5 -
 src/desktop/preload.ts                      |   2 +-
 src/node/services/agentSession.ts           |   9 ++
 src/node/services/ipcMain.ts                |   2 +-
 10 files changed, 14 insertions(+), 149 deletions(-)
 delete mode 100644 src/browser/hooks/useAutoCompactContinue.ts

diff --git a/src/browser/App.tsx b/src/browser/App.tsx
index 6860fa693..9c9109ca9 100644
--- a/src/browser/App.tsx
+++ b/src/browser/App.tsx
@@ -12,7 +12,6 @@ import { usePersistedState, updatePersistedState } from "./hooks/usePersistedSta
 import { matchesKeybind, KEYBINDS } from "./utils/ui/keybinds";
 import { useResumeManager } from "./hooks/useResumeManager";
 import { useUnreadTracking } from "./hooks/useUnreadTracking";
-import { useAutoCompactContinue } from "./hooks/useAutoCompactContinue";
 import { useWorkspaceStoreRaw, useWorkspaceRecency } from "./stores/WorkspaceStore";
 import { ChatInput } from "./components/ChatInput/index";
 import type { ChatInputAPI } from "./components/ChatInput/types";
@@ -116,9 +115,6 @@ function AppInner() {
   // Auto-resume interrupted streams on app startup and when failures occur
   useResumeManager();
 
-  // Handle auto-continue after compaction (when user uses /compact -c)
-  useAutoCompactContinue();
-
   // Sync selectedWorkspace with URL hash
   useEffect(() => {
     if (selectedWorkspace) {
diff --git a/src/browser/api.ts b/src/browser/api.ts
index 4314b5c90..c399b5aea 100644
--- a/src/browser/api.ts
+++ b/src/browser/api.ts
@@ -225,7 +225,7 @@ const webApi: IPCApi = {
     invokeIPC(IPC_CHANNELS.WORKSPACE_RESUME_STREAM, workspaceId, options),
   interruptStream: (workspaceId, options) =>
     invokeIPC(IPC_CHANNELS.WORKSPACE_INTERRUPT_STREAM, workspaceId, options),
-  clearQueue: (workspaceId) => invokeIPC(IPC_CHANNELS.WORKSPACE_QUEUE_CLEAR, workspaceId),
+  clearQueue: (workspaceId) => invokeIPC(IPC_CHANNELS.WORKSPACE_CLEAR_QUEUE, workspaceId),
   truncateHistory: (workspaceId, percentage) =>
     invokeIPC(IPC_CHANNELS.WORKSPACE_TRUNCATE_HISTORY, workspaceId, percentage),
   replaceChatHistory: (workspaceId, summaryMessage) =>
diff --git a/src/browser/hooks/useAutoCompactContinue.ts b/src/browser/hooks/useAutoCompactContinue.ts
deleted file mode 100644
index d9de923a5..000000000
--- a/src/browser/hooks/useAutoCompactContinue.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-import { useRef, useEffect } from "react";
-import { useWorkspaceStoreRaw } from "@/browser/stores/WorkspaceStore";
-import { buildSendMessageOptions } from
"@/browser/hooks/useSendMessageOptions"; - -/** - * Hook to manage auto-continue after compaction using structured message metadata - * - * Approach: - * - Watches all workspaces for single compacted message (compaction just completed) - * - Reads continueMessage from the summary message's compaction-result metadata - * - Sends continue message automatically - * - * Why summary metadata? When compaction completes, history is replaced with just the - * summary message. The original compaction-request message is deleted. To preserve - * the continueMessage across this replacement, we extract it before replacement and - * store it in the summary's metadata. - * - * Self-contained: No callback needed. Hook detects condition and handles action. - * No localStorage - metadata is the single source of truth. - * - * IMPORTANT: sendMessage options (model, thinking level, mode, etc.) are managed by the - * frontend via buildSendMessageOptions. The backend does NOT fall back to workspace - * metadata - frontend must pass complete options. - */ -export function useAutoCompactContinue() { - // Get workspace states from store - // NOTE: We use a ref-based approach instead of useSyncExternalStore to avoid - // re-rendering AppInner on every workspace state change. This hook only needs - // to react when messages change to a single compacted message state. - const store = useWorkspaceStoreRaw(); - // Track which specific compaction summary messages we've already processed. - // Key insight: Each compaction creates a unique message. Track by message ID, - // not workspace ID, to prevent processing the same compaction result multiple times. - // This is obviously correct because message IDs are immutable and unique. - const processedMessageIds = useRef>(new Set()); - - // Update ref and check for auto-continue condition - const checkAutoCompact = () => { - const newStates = store.getAllStates(); - - // Check all workspaces for completed compaction - for (const [workspaceId, state] of newStates) { - // Detect if workspace is in "single compacted message" state - // Skip workspace-init messages since they're UI-only metadata - const muxMessages = state.messages.filter((m) => m.type !== "workspace-init"); - const isSingleCompacted = - muxMessages.length === 1 && - muxMessages[0]?.type === "assistant" && - muxMessages[0].isCompacted === true; - - if (!isSingleCompacted) { - // Workspace no longer in compacted state - no action needed - // Processed message IDs will naturally accumulate but stay bounded - // (one per compaction), and get cleared when user sends new messages - continue; - } - - // After compaction, history is replaced with a single summary message - // The summary message has compaction-result metadata with the continueMessage - const summaryMessage = state.muxMessages[0]; // Single compacted message - const muxMeta = summaryMessage?.metadata?.muxMetadata; - const continueMessage = - muxMeta?.type === "compaction-result" ? muxMeta.continueMessage : undefined; - - if (!continueMessage) continue; - - // Prefer compaction-request ID for idempotency; fall back to summary message ID - const idForGuard = - muxMeta?.type === "compaction-result" && muxMeta.requestId - ? `req:${muxMeta.requestId}` - : `msg:${summaryMessage.id}`; - - // Have we already processed this specific compaction result? 
- if (processedMessageIds.current.has(idForGuard)) continue; - - // Mark THIS RESULT as processed before sending to prevent duplicates - processedMessageIds.current.add(idForGuard); - - // Build options and send message directly - const options = buildSendMessageOptions(workspaceId); - void (async () => { - try { - const result = await window.api.workspace.sendMessage( - workspaceId, - continueMessage, - options - ); - // Check if send failed (browser API returns error object, not throw) - if (!result.success && "error" in result) { - console.error("Failed to send continue message:", result.error); - // If sending failed, remove from processed set to allow retry - processedMessageIds.current.delete(idForGuard); - } - } catch (error) { - // Handle network/parsing errors (HTTP errors, etc.) - console.error("Failed to send continue message:", error); - processedMessageIds.current.delete(idForGuard); - } - })(); - } - }; - - useEffect(() => { - // Initial check - checkAutoCompact(); - - // Subscribe to store changes and check condition - // This doesn't trigger React re-renders, just our internal check - const unsubscribe = store.subscribe(() => { - checkAutoCompact(); - }); - - return unsubscribe; - }, [store]); // eslint-disable-line react-hooks/exhaustive-deps -} diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index 8433834ae..ab7b79951 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -672,12 +672,6 @@ export class WorkspaceStore { const historicalUsage = currentUsage.usageHistory.length > 0 ? sumUsageHistory(currentUsage.usageHistory) : undefined; - // Extract continueMessage from compaction-request before history gets replaced - const compactRequestMsg = findCompactionRequestMessage(aggregator); - const muxMeta = compactRequestMsg?.metadata?.muxMetadata; - const continueMessage = - muxMeta?.type === "compaction-request" ? muxMeta.parsed.continueMessage : undefined; - const summaryMessage = createMuxMessage( `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, "assistant", @@ -697,10 +691,7 @@ export class WorkspaceStore { metadata && "systemMessageTokens" in metadata ? (metadata.systemMessageTokens as number | undefined) : undefined, - // Store continueMessage in summary so it survives history replacement - muxMetadata: continueMessage - ? 
{ type: "compaction-result", continueMessage, requestId: compactRequestMsg?.id } - : { type: "normal" }, + muxMetadata: { type: "normal" }, } ); diff --git a/src/common/constants/ipc-constants.ts b/src/common/constants/ipc-constants.ts index b02a06b47..8a118423a 100644 --- a/src/common/constants/ipc-constants.ts +++ b/src/common/constants/ipc-constants.ts @@ -25,7 +25,7 @@ export const IPC_CHANNELS = { WORKSPACE_SEND_MESSAGE: "workspace:sendMessage", WORKSPACE_RESUME_STREAM: "workspace:resumeStream", WORKSPACE_INTERRUPT_STREAM: "workspace:interruptStream", - WORKSPACE_QUEUE_CLEAR: "workspace:queue:clear", + WORKSPACE_CLEAR_QUEUE: "workspace:clearQueue", WORKSPACE_TRUNCATE_HISTORY: "workspace:truncateHistory", WORKSPACE_REPLACE_HISTORY: "workspace:replaceHistory", WORKSPACE_STREAM_HISTORY: "workspace:streamHistory", diff --git a/src/common/constants/storage.ts b/src/common/constants/storage.ts index 5a2b2f121..6ce7477ec 100644 --- a/src/common/constants/storage.ts +++ b/src/common/constants/storage.ts @@ -126,15 +126,6 @@ export const PREFERRED_COMPACTION_MODEL_KEY = "preferredCompactionModel"; */ export const VIM_ENABLED_KEY = "vimEnabled"; -/** - * Get the localStorage key for the compact continue message for a workspace - * Temporarily stores the continuation prompt for the current compaction - * Should be deleted immediately after use to prevent bugs - */ -export function getCompactContinueMessageKey(workspaceId: string): string { - return `compactContinueMessage:${workspaceId}`; -} - /** * Get the localStorage key for hunk expand/collapse state in Review tab * Stores user's manual expand/collapse preferences per hunk @@ -164,7 +155,6 @@ export function getReviewSearchStateKey(workspaceId: string): string { /** * List of workspace-scoped key functions that should be copied on fork and deleted on removal - * Note: Excludes ephemeral keys like getCompactContinueMessageKey */ const PERSISTENT_WORKSPACE_KEY_FUNCTIONS: Array<(workspaceId: string) => string> = [ getModelKey, @@ -183,7 +173,6 @@ const PERSISTENT_WORKSPACE_KEY_FUNCTIONS: Array<(workspaceId: string) => string> */ const EPHEMERAL_WORKSPACE_KEY_FUNCTIONS: Array<(workspaceId: string) => string> = [ getCancelledCompactionKey, - getCompactContinueMessageKey, ]; /** diff --git a/src/common/types/message.ts b/src/common/types/message.ts index 458f545a3..0d88b52d4 100644 --- a/src/common/types/message.ts +++ b/src/common/types/message.ts @@ -20,11 +20,6 @@ export type MuxFrontendMetadata = rawCommand: string; // The original /compact command as typed by user (for display) parsed: CompactionRequestData; } - | { - type: "compaction-result"; - continueMessage: string; // Message to send after compaction completes - requestId?: string; // ID of the compaction-request user message that produced this summary (for idempotency) - } | { type: "normal"; // Regular messages }; diff --git a/src/desktop/preload.ts b/src/desktop/preload.ts index b8a910bd5..63df40b9a 100644 --- a/src/desktop/preload.ts +++ b/src/desktop/preload.ts @@ -75,7 +75,7 @@ const api: IPCApi = { interruptStream: (workspaceId: string, options?: { abandonPartial?: boolean }) => ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_INTERRUPT_STREAM, workspaceId, options), clearQueue: (workspaceId: string) => - ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_QUEUE_CLEAR, workspaceId), + ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_CLEAR_QUEUE, workspaceId), truncateHistory: (workspaceId, percentage) => ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_TRUNCATE_HISTORY, workspaceId, percentage), 
   replaceChatHistory: (workspaceId, summaryMessage) =>
diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts
index f49f1d61c..a3ed32dff 100644
--- a/src/node/services/agentSession.ts
+++ b/src/node/services/agentSession.ts
@@ -314,6 +314,15 @@ export class AgentSession {
     this.emitChatEvent(userMessage);
 
+    // If this is a compaction request with a continue message, queue it for auto-send after compaction
+    const muxMeta = options?.muxMetadata;
+    if (muxMeta?.type === "compaction-request" && muxMeta.parsed.continueMessage && options) {
+      // Strip out edit-specific and compaction-specific fields so the queued message is a fresh user message
+      const { muxMetadata, mode, editMessageId, ...continueOptions } = options;
+      this.messageQueue.add(muxMeta.parsed.continueMessage, continueOptions);
+      this.emitQueuedMessageChanged();
+    }
+
     if (!options?.model || options.model.trim().length === 0) {
       return Err(
         createUnknownSendMessageError("No model specified. Please select a model using /model.")
diff --git a/src/node/services/ipcMain.ts b/src/node/services/ipcMain.ts
index d76819023..90dcf8e95 100644
--- a/src/node/services/ipcMain.ts
+++ b/src/node/services/ipcMain.ts
@@ -1000,7 +1000,7 @@ export class IpcMain {
       }
     );
 
-    ipcMain.handle(IPC_CHANNELS.WORKSPACE_QUEUE_CLEAR, (_event, workspaceId: string) => {
+    ipcMain.handle(IPC_CHANNELS.WORKSPACE_CLEAR_QUEUE, (_event, workspaceId: string) => {
      try {
        const session = this.getOrCreateSession(workspaceId);
        session.clearQueue();

From 4560627cba01b7e446d9c4cfe6491677883a425c Mon Sep 17 00:00:00 2001
From: ethan
Date: Tue, 18 Nov 2025 21:50:52 +1100
Subject: =?UTF-8?q?=F0=9F=A4=96=20refactor:=20move=20compact?=
 =?UTF-8?q?ion=20logic=20to=20backend?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Move history compaction handling from WorkspaceStore to agentSession to
centralize server-side operations.
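In outline, the new stream-end handling in agentSession works like the minimal
sketch below. Illustrative only: handleCompactionCompletion, performCompaction,
and processedCompactionRequestIds are named in the change list that follows,
while the other helpers and all signatures are assumptions, not the literal
patch.

    // Sketch of the stream-end path (assumed signatures, simplified error handling)
    private async handleCompactionCompletion(evt: StreamEndEvent): Promise<boolean> {
      if (!this.isCompactingStream()) return false;             // ordinary stream: nothing to do
      const requestId = this.findCompactionRequestId();         // assumed helper
      if (!requestId || this.processedCompactionRequestIds.has(requestId)) {
        return true;                                            // this compaction run was already handled
      }
      this.processedCompactionRequestIds.add(requestId);        // dedupe repeated stream-end events
      const summary = this.getCompactionSummary(evt.messageId); // assumed helper
      if (summary) {
        await this.performCompaction(summary, evt.metadata);    // atomically replace history with summary
      }
      return true; // handled; the continue message queued at /compact time now auto-sends
    }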
Backend changes: - Added handleCompactionCompletion() to detect and handle compaction stream-end events - Added handleCompactionAbort() to handle Ctrl+A (accept early with [truncated]) and Ctrl+C (cancel) flows - Added performCompaction() to atomically replace chat history with summary message - Added processedCompactionRequestIds Set to dedupe repeated compaction events - Implemented abandonPartial flag flow from IPC through to StreamAbortEvent - Extract truncated message content from history instead of partialService - Wrap compaction handlers in try-catch to ensure stream events always forwarded - Made performCompaction return Result type for proper error handling Frontend changes: - Removed compaction handlers from WorkspaceStore - Simplified cancelCompaction() to just call interruptStream with abandonPartial flag - Fixed Ctrl+A keybind to pass abandonPartial: false for early accept Shared changes: - Updated StreamAbortEvent to include abandonPartial flag - historyService.clearHistory() now returns deleted sequence numbers - Created calculateCumulativeUsage() utility to extract and sum usage from messages - aiService respects abandonPartial flag - skips committing when true --- bun.lock | 137 +------------ src/browser/stores/WorkspaceStore.ts | 218 +------------------- src/common/types/stream.ts | 1 + src/common/utils/compaction/handler.ts | 27 +-- src/common/utils/tokens/displayUsage.ts | 40 ++++ src/node/services/agentSession.ts | 254 +++++++++++++++++++++++- src/node/services/aiService.ts | 4 +- src/node/services/historyService.ts | 4 +- src/node/services/ipcMain.ts | 11 +- src/node/services/streamManager.ts | 10 +- 10 files changed, 314 insertions(+), 392 deletions(-) diff --git a/bun.lock b/bun.lock index 533a8e1b0..fbd3ef32c 100644 --- a/bun.lock +++ b/bun.lock @@ -1,5 +1,6 @@ { "lockfileVersion": 1, + "configVersion": 0, "workspaces": { "": { "name": "mux", @@ -934,7 +935,7 @@ "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], - "@types/node": ["@types/node@22.19.1", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-LCCV0HdSZZZb34qifBsyWlUmok6W7ouER+oQIGBScS8EsZsQbrtFTUrDX4hOl+CS6p7cnNC4td+qrSVGSCTUfQ=="], + "@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], "@types/plist": ["@types/plist@3.0.5", "", { "dependencies": { "@types/node": "*", "xmlbuilder": ">=11.0.1" } }, "sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA=="], @@ -2952,7 +2953,7 @@ "undici": ["undici@7.16.0", "", {}, "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g=="], - "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], + "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="], @@ -3142,26 +3143,14 @@ "@istanbuljs/load-nyc-config/js-yaml": ["js-yaml@3.14.1", "", { "dependencies": { "argparse": 
"^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g=="], - "@jest/console/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "@jest/console/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "@jest/core/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "@jest/core/ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" } }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], "@jest/core/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "@jest/core/ci-info": ["ci-info@4.3.1", "", {}, "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA=="], - "@jest/environment/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@jest/fake-timers/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@jest/pattern/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@jest/reporters/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "@jest/reporters/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "@jest/reporters/istanbul-lib-instrument": ["istanbul-lib-instrument@6.0.3", "", { "dependencies": { "@babel/core": "^7.23.9", "@babel/parser": "^7.23.9", "@istanbuljs/schema": "^0.1.3", "istanbul-lib-coverage": "^3.2.0", "semver": "^7.5.4" } }, "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q=="], @@ -3176,8 +3165,6 @@ "@jest/transform/write-file-atomic": ["write-file-atomic@5.0.1", "", { "dependencies": { "imurmurhash": "^0.1.4", "signal-exit": "^4.0.1" } }, "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw=="], - "@jest/types/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "@jest/types/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "@malept/flatpak-bundler/fs-extra": 
["fs-extra@9.1.0", "", { "dependencies": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ=="], @@ -3254,36 +3241,6 @@ "@testing-library/jest-dom/dom-accessibility-api": ["dom-accessibility-api@0.6.3", "", {}, "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w=="], - "@types/body-parser/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/cacheable-request/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/connect/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/cors/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/express-serve-static-core/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/fs-extra/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/keyv/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/plist/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/responselike/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/send/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/serve-static/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/wait-on/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/write-file-atomic/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/ws/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/yauzl/@types/node": ["@types/node@24.10.1", "", { 
"dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "@vitest/mocker/estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="], @@ -3302,8 +3259,6 @@ "builder-util/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "bun-types/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "cacache/fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], "cacache/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], @@ -3342,6 +3297,8 @@ "dom-serializer/entities": ["entities@2.2.0", "", {}, "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A=="], + "electron/@types/node": ["@types/node@22.19.1", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-LCCV0HdSZZZb34qifBsyWlUmok6W7ouER+oQIGBScS8EsZsQbrtFTUrDX4hOl+CS6p7cnNC4td+qrSVGSCTUfQ=="], + "electron-builder/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "electron-publish/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -3422,8 +3379,6 @@ "istanbul-lib-report/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-circus/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-circus/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-cli/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -3436,40 +3391,28 @@ "jest-each/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "jest-environment-node/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - 
"jest-haste-map/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-haste-map/fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], "jest-matcher-utils/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-message-util/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "jest-mock/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-process-manager/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-process-manager/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], "jest-resolve/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "jest-runner/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-runner/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-runner/source-map-support": ["source-map-support@0.5.13", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w=="], - "jest-runtime/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-runtime/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-runtime/strip-bom": ["strip-bom@4.0.0", "", {}, "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w=="], "jest-snapshot/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "jest-util/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-util/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], 
"jest-util/ci-info": ["ci-info@4.3.1", "", {}, "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA=="], @@ -3480,16 +3423,12 @@ "jest-watch-typeahead/strip-ansi": ["strip-ansi@7.1.2", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA=="], - "jest-watcher/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jest-watcher/ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" } }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], "jest-watcher/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "jest-watcher/string-length": ["string-length@4.0.2", "", { "dependencies": { "char-regex": "^1.0.2", "strip-ansi": "^6.0.0" } }, "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ=="], - "jest-worker/@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - "jszip/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], "katex/commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="], @@ -3624,32 +3563,18 @@ "@istanbuljs/load-nyc-config/js-yaml/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], - "@jest/console/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "@jest/console/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "@jest/core/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "@jest/core/ansi-escapes/type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], "@jest/core/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "@jest/environment/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@jest/fake-timers/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - 
"@jest/pattern/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@jest/reporters/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "@jest/reporters/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "@jest/snapshot-utils/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "@jest/transform/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "@jest/types/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "@jest/types/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "@radix-ui/react-arrow/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], @@ -3680,36 +3605,6 @@ "@testing-library/dom/pretty-format/react-is": ["react-is@17.0.2", "", {}, "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w=="], - "@types/body-parser/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/cacheable-request/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/connect/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/cors/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/express-serve-static-core/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/fs-extra/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/keyv/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/plist/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/responselike/@types/node/undici-types": ["undici-types@7.16.0", "", {}, 
"sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/send/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/serve-static/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/wait-on/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/write-file-atomic/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/ws/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "@types/yauzl/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], "app-builder-lib/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], @@ -3724,8 +3619,6 @@ "builder-util/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "bun-types/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "cacache/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], "cacache/tar/minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="], @@ -3756,6 +3649,8 @@ "electron-rebuild/node-gyp/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "electron/@types/node/undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], + "eslint/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "filelist/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], @@ -3768,7 +3663,7 @@ "global-prefix/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - "jest-circus/@types/node/undici-types": ["undici-types@7.16.0", "", {}, 
"sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], + "happy-dom/@types/node/undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], "jest-circus/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], @@ -3780,46 +3675,30 @@ "jest-each/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-environment-node/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "jest-haste-map/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-matcher-utils/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "jest-message-util/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-mock/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-process-manager/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "jest-resolve/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-runner/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-runner/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-runtime/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-runtime/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "jest-snapshot/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-util/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-util/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "jest-validate/chalk/supports-color": 
["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "jest-watch-typeahead/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], - "jest-watcher/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jest-watcher/ansi-escapes/type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], "jest-watcher/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "jest-worker/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "jszip/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], "jszip/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index ab7b79951..6c56ea1fb 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -1,6 +1,5 @@ import assert from "@/common/utils/assert"; import type { MuxMessage, DisplayedMessage, QueuedMessage } from "@/common/types/message"; -import { createMuxMessage } from "@/common/types/message"; import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; import type { WorkspaceChatMessage } from "@/common/types/ipc"; import type { TodoItem } from "@/common/types/tools"; @@ -18,17 +17,11 @@ import { isRestoreToInput, } from "@/common/types/ipc"; import { MapStore } from "./MapStore"; -import { createDisplayUsage } from "@/common/utils/tokens/displayUsage"; +import { getUsageHistory } from "@/common/utils/tokens/displayUsage"; import { WorkspaceConsumerManager } from "./WorkspaceConsumerManager"; import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; -import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; import type { TokenConsumer } from "@/common/types/chatStats"; import type { LanguageModelV2Usage } from "@ai-sdk/provider"; -import { getCancelledCompactionKey } from "@/common/constants/storage"; -import { - isCompactingStream, - findCompactionRequestMessage, -} from "@/common/utils/compaction/handler"; import { createFreshRetryState } from "@/browser/utils/messages/retryState"; export interface WorkspaceState { @@ -149,10 +142,6 @@ export class WorkspaceStore { aggregator.handleStreamEnd(data as never); aggregator.clearTokenState((data as { messageId: string }).messageId); - if (this.handleCompactionCompletion(workspaceId, aggregator, data)) { - return; - } - // Reset retry state on successful stream completion updatePersistedState(getRetryStateKey(workspaceId), createFreshRetryState()); @@ -164,10 +153,6 @@ export class WorkspaceStore { aggregator.clearTokenState((data as { messageId: string }).messageId); aggregator.handleStreamAbort(data as never); - if (this.handleCompactionAbort(workspaceId, aggregator, data)) { - return; - 
} - this.states.bump(workspaceId); this.dispatchResumeCheck(workspaceId); this.finalizeUsageStats(workspaceId, (data as { metadata?: never }).metadata); @@ -446,42 +431,8 @@ export class WorkspaceStore { const aggregator = this.assertGet(workspaceId); const messages = aggregator.getAllMessages(); - - // Extract usage from assistant messages - const usageHistory: ChatUsageDisplay[] = []; - let cumulativeHistorical: ChatUsageDisplay | undefined; - - for (const msg of messages) { - if (msg.role === "assistant") { - // Check for historical usage from compaction summaries - // This preserves costs from messages deleted during compaction - if (msg.metadata?.historicalUsage) { - cumulativeHistorical = msg.metadata.historicalUsage; - } - - // Extract current message's usage - if (msg.metadata?.usage) { - // Use the model from this specific message (not global) - const model = msg.metadata.model ?? aggregator.getCurrentModel() ?? "unknown"; - - const usage = createDisplayUsage( - msg.metadata.usage, - model, - msg.metadata.providerMetadata - ); - - if (usage) { - usageHistory.push(usage); - } - } - } - } - - // If we have historical usage from a compaction, prepend it to history - // This ensures costs from pre-compaction messages are included in totals - if (cumulativeHistorical) { - usageHistory.unshift(cumulativeHistorical); - } + const model = aggregator.getCurrentModel(); + const usageHistory = getUsageHistory(messages, model); // Calculate total from usage history (now includes historical) const totalTokens = usageHistory.reduce( @@ -544,169 +495,6 @@ export class WorkspaceStore { return this.consumersStore.subscribeKey(workspaceId, listener); } - /** - * Handle compact_summary tool completion. - * Returns true if compaction was handled (caller should early return). - */ - // Track processed compaction-request IDs to dedupe performCompaction across duplicated events - private processedCompactionRequestIds = new Set(); - - private handleCompactionCompletion( - workspaceId: string, - aggregator: StreamingMessageAggregator, - data: WorkspaceChatMessage - ): boolean { - // Type guard: only StreamEndEvent has messageId - if (!("messageId" in data)) return false; - - // Check if this was a compaction stream - if (!isCompactingStream(aggregator)) { - return false; - } - - // Extract the compaction-request message to identify this compaction run - const compactionRequestMsg = findCompactionRequestMessage(aggregator); - if (!compactionRequestMsg) { - return false; - } - - // Dedupe: If we've already processed this compaction-request, skip re-running - if (this.processedCompactionRequestIds.has(compactionRequestMsg.id)) { - return true; // Already handled compaction for this request - } - - // Extract the summary text from the assistant's response - const summary = aggregator.getCompactionSummary(data.messageId); - if (!summary) { - console.warn("[WorkspaceStore] Compaction completed but no summary text found"); - return false; - } - - // Mark this compaction-request as processed before performing compaction - this.processedCompactionRequestIds.add(compactionRequestMsg.id); - - this.performCompaction(workspaceId, aggregator, data, summary); - return true; - } - - /** - * Handle interruption of a compaction stream (StreamAbortEvent). 
- * - * Two distinct flows trigger this: - * - **Ctrl+A (accept early)**: Perform compaction with [truncated] sentinel - * - **Ctrl+C (cancel)**: Skip compaction, let cancelCompaction handle cleanup - * - * Uses localStorage to distinguish flows: - * - Checks for cancellation marker in localStorage - * - Verifies messageId matches for freshness - * - Reload-safe: localStorage persists across page reloads - */ - private handleCompactionAbort( - workspaceId: string, - aggregator: StreamingMessageAggregator, - data: WorkspaceChatMessage - ): boolean { - // Type guard: only StreamAbortEvent has messageId - if (!("messageId" in data)) return false; - - // Check if this was a compaction stream - if (!isCompactingStream(aggregator)) { - return false; - } - - // Get the compaction request message for ID verification - const compactionRequestMsg = findCompactionRequestMessage(aggregator); - if (!compactionRequestMsg) { - return false; - } - - // Ctrl+C flow: Check localStorage for cancellation marker - // Verify compaction-request user message ID matches (stable across retries) - const storageKey = getCancelledCompactionKey(workspaceId); - const cancelData = localStorage.getItem(storageKey); - if (cancelData) { - try { - const parsed = JSON.parse(cancelData) as { compactionRequestId: string; timestamp: number }; - if (parsed.compactionRequestId === compactionRequestMsg.id) { - // This is a cancelled compaction - clean up marker and skip compaction - localStorage.removeItem(storageKey); - return false; // Skip compaction, cancelCompaction() handles cleanup - } - } catch (error) { - console.error("[WorkspaceStore] Failed to parse cancellation data:", error); - } - // If compactionRequestId doesn't match or parse failed, clean up stale data - localStorage.removeItem(storageKey); - } - - // Ctrl+A flow: Accept early with [truncated] sentinel - const partialSummary = aggregator.getCompactionSummary(data.messageId); - if (!partialSummary) { - console.warn("[WorkspaceStore] Compaction aborted but no partial summary found"); - return false; - } - - // Append [truncated] sentinel on new line to indicate incomplete summary - const truncatedSummary = partialSummary.trim() + "\n\n[truncated]"; - - this.performCompaction(workspaceId, aggregator, data, truncatedSummary); - return true; - } - - /** - * Perform history compaction by replacing chat history with summary message. - * Type-safe: only called when we've verified data is a StreamEndEvent. - */ - private performCompaction( - workspaceId: string, - aggregator: StreamingMessageAggregator, - data: WorkspaceChatMessage, - summary: string - ): void { - // Extract metadata safely with type guard - const metadata = "metadata" in data ? data.metadata : undefined; - - // Calculate cumulative historical usage before replacing history - // This preserves costs from all messages that are about to be deleted - const currentUsage = this.getWorkspaceUsage(workspaceId); - const historicalUsage = - currentUsage.usageHistory.length > 0 ? sumUsageHistory(currentUsage.usageHistory) : undefined; - - const summaryMessage = createMuxMessage( - `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, - "assistant", - summary, - { - timestamp: Date.now(), - compacted: true, - model: aggregator.getCurrentModel(), - usage: metadata?.usage, - historicalUsage, // Store cumulative costs from all pre-compaction messages - providerMetadata: - metadata && "providerMetadata" in metadata - ? 
(metadata.providerMetadata as Record | undefined) - : undefined, - duration: metadata?.duration, - systemMessageTokens: - metadata && "systemMessageTokens" in metadata - ? (metadata.systemMessageTokens as number | undefined) - : undefined, - muxMetadata: { type: "normal" }, - } - ); - - void (async () => { - try { - await window.api.workspace.replaceChatHistory(workspaceId, summaryMessage); - } catch (error) { - console.error("[WorkspaceStore] Failed to replace history:", error); - } finally { - this.states.bump(workspaceId); - this.checkAndBumpRecencyIfChanged(); - } - })(); - } - /** * Update usage and schedule consumer calculation after stream completion. * diff --git a/src/common/types/stream.ts b/src/common/types/stream.ts index dcbf3547a..4639329a7 100644 --- a/src/common/types/stream.ts +++ b/src/common/types/stream.ts @@ -56,6 +56,7 @@ export interface StreamAbortEvent { usage?: LanguageModelV2Usage; duration?: number; }; + abandonPartial?: boolean; } export interface ErrorEvent { diff --git a/src/common/utils/compaction/handler.ts b/src/common/utils/compaction/handler.ts index adcc68bed..eaf93a0a9 100644 --- a/src/common/utils/compaction/handler.ts +++ b/src/common/utils/compaction/handler.ts @@ -12,7 +12,6 @@ */ import type { StreamingMessageAggregator } from "@/browser/utils/messages/StreamingMessageAggregator"; -import { getCancelledCompactionKey } from "@/common/constants/storage"; /** * Check if the workspace is currently in a compaction stream @@ -55,19 +54,14 @@ export function getCompactionCommand(aggregator: StreamingMessageAggregator): st * Cancel compaction (Ctrl+C flow) * * Aborts the compaction stream and puts user in edit mode for compaction-request: - * - Interrupts stream with abandonPartial flag (deletes partial, doesn't commit) - * - Skips compaction (via localStorage marker checked by handleCompactionAbort) + * - Interrupts stream with abandonPartial=true flag (backend skips compaction) * - Enters edit mode on compaction-request message * - Restores original /compact command to input for re-editing * - Leaves compaction-request message in history (can edit or delete it) * * Flow: - * 1. Store cancellation marker in localStorage with compactionRequestId for verification - * 2. Interrupt stream with {abandonPartial: true} - backend deletes partial - * 3. handleCompactionAbort checks localStorage, verifies compactionRequestId, skips compaction - * 4. Enter edit mode on compaction-request message with original command - * - * Reload-safe: localStorage persists across reloads, compactionRequestId ensures freshness + * 1. Interrupt stream with {abandonPartial: true} - backend detects and skips compaction + * 2. 
Enter edit mode on compaction-request message with original command */ export async function cancelCompaction( workspaceId: string, @@ -86,21 +80,8 @@ export async function cancelCompaction( return false; } - // CRITICAL: Store cancellation marker in localStorage BEFORE interrupt - // Use the compaction-request user message ID (stable across retries) - // This persists across reloads and verifies we're cancelling the right compaction - const storageKey = getCancelledCompactionKey(workspaceId); - localStorage.setItem( - storageKey, - JSON.stringify({ - compactionRequestId: compactionRequestMsg.id, - timestamp: Date.now(), - }) - ); - // Interrupt stream with abandonPartial flag - // This tells backend to DELETE the partial instead of committing it - // Result: history ends with the compaction-request user message (which is fine - just a user message) + // Backend detects this and skips compaction (Ctrl+C flow) await window.api.workspace.interruptStream(workspaceId, { abandonPartial: true }); // Enter edit mode on the compaction-request message with original command diff --git a/src/common/utils/tokens/displayUsage.ts b/src/common/utils/tokens/displayUsage.ts index b98c5e771..95d515f5e 100644 --- a/src/common/utils/tokens/displayUsage.ts +++ b/src/common/utils/tokens/displayUsage.ts @@ -8,6 +8,7 @@ import type { LanguageModelV2Usage } from "@ai-sdk/provider"; import { getModelStats } from "./modelStats"; import type { ChatUsageDisplay } from "./usageAggregator"; +import type { MuxMessage } from "@/common/types/message"; /** * Create a display-friendly usage object from AI SDK usage @@ -90,3 +91,42 @@ export function createDisplayUsage( model, // Include model for display purposes }; } + +export function getUsageHistory( + messages: MuxMessage[], + fallbackModel?: string +): ChatUsageDisplay[] { + // Extract usage from assistant messages + const usageHistory: ChatUsageDisplay[] = []; + let cumulativeHistorical: ChatUsageDisplay | undefined; + + for (const msg of messages) { + if (msg.role === "assistant") { + // Check for historical usage from compaction summaries + // This preserves costs from messages deleted during compaction + if (msg.metadata?.historicalUsage) { + cumulativeHistorical = msg.metadata.historicalUsage; + } + + // Extract current message's usage + if (msg.metadata?.usage) { + // Use the model from this specific message (not global) + const model = msg.metadata.model ?? fallbackModel ?? 
"unknown"; + + const usage = createDisplayUsage(msg.metadata.usage, model, msg.metadata.providerMetadata); + + if (usage) { + usageHistory.push(usage); + } + } + } + } + + // If we have historical usage from a compaction, prepend it to history + // This ensures costs from pre-compaction messages are included in totals + if (cumulativeHistorical) { + usageHistory.unshift(cumulativeHistorical); + } + + return usageHistory; +} diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index a3ed32dff..a5f53d2f5 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -15,6 +15,7 @@ import type { StreamErrorMessage, SendMessageOptions, ImagePart, + DeleteMessage, } from "@/common/types/ipc"; import type { SendMessageError } from "@/common/types/errors"; import { createUnknownSendMessageError } from "@/node/services/utils/sendMessageError"; @@ -23,6 +24,11 @@ import { Ok, Err } from "@/common/types/result"; import { enforceThinkingPolicy } from "@/browser/utils/thinking/policy"; import { createRuntime } from "@/node/runtime/runtimeFactory"; import { MessageQueue } from "./messageQueue"; +import type { StreamEndEvent, StreamAbortEvent } from "@/common/types/stream"; +import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; +import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; +import { createDisplayUsage } from "@/common/utils/tokens/tokenStatsCalculator"; +import type { LanguageModelV2Usage } from "@ai-sdk/provider"; export interface AgentSessionChatEvent { workspaceId: string; @@ -57,6 +63,7 @@ export class AgentSession { []; private disposed = false; private readonly messageQueue = new MessageQueue(); + private readonly processedCompactionRequestIds = new Set(); constructor(options: AgentSessionOptions) { assert(options, "AgentSession requires options"); @@ -346,14 +353,14 @@ export class AgentSession { return this.streamWithHistory(model, options); } - async interruptStream(): Promise> { + async interruptStream(abandonPartial?: boolean): Promise> { this.assertNotDisposed("interruptStream"); if (!this.aiService.isStreaming(this.workspaceId)) { return Ok(undefined); } - const stopResult = await this.aiService.stopStream(this.workspaceId); + const stopResult = await this.aiService.stopStream(this.workspaceId, abandonPartial); if (!stopResult.success) { return Err(stopResult.error); } @@ -396,7 +403,10 @@ export class AgentSession { } private attachAiListeners(): void { - const forward = (event: string, handler: (payload: WorkspaceChatMessage) => void) => { + const forward = ( + event: string, + handler: (payload: WorkspaceChatMessage) => Promise | void + ) => { const wrapped = (...args: unknown[]) => { const [payload] = args; if ( @@ -407,7 +417,7 @@ export class AgentSession { ) { return; } - handler(payload as WorkspaceChatMessage); + void handler(payload as WorkspaceChatMessage); }; this.aiListeners.push({ event, handler: wrapped }); this.aiService.on(event, wrapped as never); @@ -425,14 +435,21 @@ export class AgentSession { forward("reasoning-delta", (payload) => this.emitChatEvent(payload)); forward("reasoning-end", (payload) => this.emitChatEvent(payload)); - forward("stream-end", (payload) => { - this.emitChatEvent(payload); + forward("stream-end", async (payload) => { + const handled = await this.handleCompactionCompletion(payload as StreamEndEvent); + if (!handled) { + this.emitChatEvent(payload); + } // Stream end: auto-send queued messages this.sendQueuedMessages(); }); - 
forward("stream-abort", (payload) => { - this.emitChatEvent(payload); + forward("stream-abort", async (payload) => { + const handled = await this.handleCompactionAbort(payload as StreamAbortEvent); + if (!handled) { + this.emitChatEvent(payload); + } + // Stream aborted: restore queued messages to input if (!this.messageQueue.isEmpty()) { const displayText = this.messageQueue.getDisplayText(); @@ -553,4 +570,225 @@ export class AgentSession { private assertNotDisposed(operation: string): void { assert(!this.disposed, `AgentSession.${operation} called after dispose`); } + + /** + * Handle compaction stream abort (Ctrl+C cancel or Ctrl+A accept early) + * + * Two flows: + * - Ctrl+C: abandonPartial=true → skip compaction + * - Ctrl+A: abandonPartial=false/undefined → perform compaction with [truncated] + */ + private async handleCompactionAbort(event: StreamAbortEvent): Promise { + // Check if the last user message is a compaction-request + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return false; + } + + const messages = historyResult.data; + const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); + const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; + + if (!isCompaction || !lastUserMsg) { + return false; + } + + // Ctrl+C flow: abandonPartial=true means user cancelled, skip compaction + if (event.abandonPartial === true) { + return false; + } + + // Ctrl+A flow: Accept early with [truncated] sentinel + // Get the truncated message from historyResult.data + const lastMessage = messages[messages.length - 1]; + if (!lastMessage || lastMessage.role !== "assistant") { + console.warn("[AgentSession] Compaction aborted but last message is not assistant"); + return false; + } + + const partialSummary = lastMessage.parts + .filter((part): part is { type: "text"; text: string } => part.type === "text") + .map((part) => part.text) + .join(""); + + // Append [truncated] sentinel + const truncatedSummary = partialSummary.trim() + "\n\n[truncated]"; + + // Perform compaction with truncated summary + const result = await this.performCompaction(truncatedSummary, { + model: lastMessage.metadata?.model ?? "unknown", + usage: event.metadata?.usage, + duration: event.metadata?.duration, + providerMetadata: lastMessage.metadata?.providerMetadata, + systemMessageTokens: lastMessage.metadata?.systemMessageTokens, + }); + if (!result.success) { + console.error("[AgentSession] Early compaction failed:", result.error); + return false; + } + + this.emitChatEvent(event); + return true; + } + + /** + * Handle compaction stream completion + * + * Detects when a compaction stream finishes, extracts the summary, + * and performs history replacement atomically. 
+ */ + private async handleCompactionCompletion(event: StreamEndEvent): Promise { + // Check if the last user message is a compaction-request + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return false; + } + + const messages = historyResult.data; + const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); + const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; + + if (!isCompaction || !lastUserMsg) { + return false; + } + + // Dedupe: If we've already processed this compaction-request, skip + if (this.processedCompactionRequestIds.has(lastUserMsg.id)) { + return true; + } + + const summary = event.parts + .filter((part): part is { type: "text"; text: string } => part.type === "text") + .map((part) => part.text) + .join(""); + + // Mark as processed before performing compaction + this.processedCompactionRequestIds.add(lastUserMsg.id); + + const result = await this.performCompaction(summary, event.metadata); + if (!result.success) { + console.error("[AgentSession] Compaction failed:", result.error); + return false; + } + + // Emit stream-end to frontend so UI knows compaction is complete + this.emitCompactionStreamEnd(event); + return true; + } + + /** + * Perform history compaction by replacing all messages with a summary + * + * Steps: + * 1. Calculate cumulative usage from all messages (for historicalUsage field) + * 2. Clear entire history and get deleted sequence numbers + * 3. Append summary message with metadata + * 4. Emit delete event for old messages + * 5. Emit summary message to frontend + */ + private async performCompaction( + summary: string, + metadata: { + model: string; + usage?: LanguageModelV2Usage; + duration?: number; + providerMetadata?: Record; + systemMessageTokens?: number; + } + ): Promise> { + // Get all messages to calculate cumulative usage + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return Err(`Failed to get history for usage calculation: ${historyResult.error}`); + } + + // Calculate cumulative historical usage from all messages + const usageHistory: ChatUsageDisplay[] = []; + let cumulativeHistorical: ChatUsageDisplay | undefined; + + for (const msg of historyResult.data) { + if (msg.role === "assistant") { + // Accumulate historical usage from previous compactions + if (msg.metadata?.historicalUsage) { + cumulativeHistorical = msg.metadata.historicalUsage; + } + + // Add current message's usage + if (msg.metadata?.usage && msg.metadata.model) { + const displayUsage = createDisplayUsage( + msg.metadata.usage, + msg.metadata.model, + msg.metadata.providerMetadata + ); + if (displayUsage) { + usageHistory.push(displayUsage); + } + } + } + } + + // If we have historical usage from a compaction, prepend it to history + // This ensures costs from pre-compaction messages are included in totals + if (cumulativeHistorical) { + usageHistory.unshift(cumulativeHistorical); + } + + const historicalUsage = usageHistory.length > 0 ? 
sumUsageHistory(usageHistory) : undefined; + + // Clear entire history and get deleted sequences + const clearResult = await this.historyService.clearHistory(this.workspaceId); + if (!clearResult.success) { + return Err(`Failed to clear history: ${clearResult.error}`); + } + const deletedSequences = clearResult.data; + + // Create summary message with metadata + const summaryMessage = createMuxMessage( + `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, + "assistant", + summary, + { + timestamp: Date.now(), + compacted: true, + model: metadata.model, + usage: metadata.usage, + historicalUsage, + providerMetadata: metadata.providerMetadata, + duration: metadata.duration, + systemMessageTokens: metadata.systemMessageTokens, + muxMetadata: { type: "normal" }, + } + ); + + // Append summary to history + const appendResult = await this.historyService.appendToHistory( + this.workspaceId, + summaryMessage + ); + if (!appendResult.success) { + return Err(`Failed to append summary: ${appendResult.error}`); + } + + // Emit delete event for old messages + if (deletedSequences.length > 0) { + const deleteMessage: DeleteMessage = { + type: "delete", + historySequences: deletedSequences, + }; + this.emitChatEvent(deleteMessage); + } + + // Emit summary message to frontend + this.emitChatEvent(summaryMessage); + + return Ok(undefined); + } + + /** + * Emit stream-end event after compaction completes + * This notifies the frontend that the stream is done + */ + private emitCompactionStreamEnd(event: StreamEndEvent): void { + this.emitChatEvent(event); + } } diff --git a/src/node/services/aiService.ts b/src/node/services/aiService.ts index bb661c15b..4e726fbbc 100644 --- a/src/node/services/aiService.ts +++ b/src/node/services/aiService.ts @@ -877,12 +877,12 @@ export class AIService extends EventEmitter { } } - async stopStream(workspaceId: string): Promise> { + async stopStream(workspaceId: string, abandonPartial?: boolean): Promise> { if (this.mockModeEnabled && this.mockScenarioPlayer) { this.mockScenarioPlayer.stop(workspaceId); return Ok(undefined); } - return this.streamManager.stopStream(workspaceId); + return this.streamManager.stopStream(workspaceId, abandonPartial); } /** diff --git a/src/node/services/historyService.ts b/src/node/services/historyService.ts index 312c89b2c..13be7fd38 100644 --- a/src/node/services/historyService.ts +++ b/src/node/services/historyService.ts @@ -418,12 +418,12 @@ export class HistoryService { }); } - async clearHistory(workspaceId: string): Promise> { + async clearHistory(workspaceId: string): Promise> { const result = await this.truncateHistory(workspaceId, 1.0); if (!result.success) { return Err(result.error); } - return Ok(undefined); + return Ok(result.data); } /** diff --git a/src/node/services/ipcMain.ts b/src/node/services/ipcMain.ts index 90dcf8e95..0284674c4 100644 --- a/src/node/services/ipcMain.ts +++ b/src/node/services/ipcMain.ts @@ -979,7 +979,7 @@ export class IpcMain { log.debug("interruptStream handler: Received", { workspaceId, options }); try { const session = this.getOrCreateSession(workspaceId); - const stopResult = await session.interruptStream(); + const stopResult = await session.interruptStream(options?.abandonPartial); if (!stopResult.success) { log.error("Failed to stop stream:", stopResult.error); return { success: false, error: stopResult.error }; @@ -1063,19 +1063,12 @@ export class IpcMain { } try { - // Get all existing messages to collect their historySequence numbers - const historyResult = await 
this.historyService.getHistory(workspaceId); - const deletedSequences = historyResult.success - ? historyResult.data - .map((msg) => msg.metadata?.historySequence ?? -1) - .filter((s) => s >= 0) - : []; - // Clear entire history const clearResult = await this.historyService.clearHistory(workspaceId); if (!clearResult.success) { return Err(`Failed to clear history: ${clearResult.error}`); } + const deletedSequences = clearResult.data; // Append the summary message to history (gets historySequence assigned by backend) // Frontend provides the message with all metadata (compacted, timestamp, etc.) diff --git a/src/node/services/streamManager.ts b/src/node/services/streamManager.ts index 0faebea56..0c3475340 100644 --- a/src/node/services/streamManager.ts +++ b/src/node/services/streamManager.ts @@ -225,7 +225,7 @@ export class StreamManager extends EventEmitter { const existing = this.workspaceStreams.get(workspaceId); if (existing && existing.state !== StreamState.IDLE) { - await this.cancelStreamSafely(workspaceId, existing); + await this.cancelStreamSafely(workspaceId, existing, undefined); } // Generate unique token for this stream (8 hex chars for context efficiency) @@ -408,7 +408,8 @@ export class StreamManager extends EventEmitter { private async cancelStreamSafely( workspaceId: WorkspaceId, - streamInfo: WorkspaceStreamInfo + streamInfo: WorkspaceStreamInfo, + abandonPartial?: boolean ): Promise { try { streamInfo.state = StreamState.STOPPING; @@ -432,6 +433,7 @@ export class StreamManager extends EventEmitter { workspaceId: workspaceId as string, messageId: streamInfo.messageId, metadata: { usage, duration }, + abandonPartial, }); // Clean up immediately @@ -1318,13 +1320,13 @@ export class StreamManager extends EventEmitter { /** * Stops an active stream for a workspace */ - async stopStream(workspaceId: string): Promise> { + async stopStream(workspaceId: string, abandonPartial?: boolean): Promise> { const typedWorkspaceId = workspaceId as WorkspaceId; try { const streamInfo = this.workspaceStreams.get(typedWorkspaceId); if (streamInfo) { - await this.cancelStreamSafely(typedWorkspaceId, streamInfo); + await this.cancelStreamSafely(typedWorkspaceId, streamInfo, abandonPartial); } return Ok(undefined); } catch (error) { From 4cf9c240448cfdfcccfa53601e1d0ad8ce6a4f65 Mon Sep 17 00:00:00 2001 From: ethan Date: Wed, 19 Nov 2025 02:11:03 +1100 Subject: [PATCH 03/13] fixup --- src/browser/stores/WorkspaceStore.ts | 4 +-- src/common/utils/tokens/displayUsage.ts | 3 +-- src/node/services/agentSession.ts | 34 ++----------------------- 3 files changed, 5 insertions(+), 36 deletions(-) diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index 6c56ea1fb..eeb345e69 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -17,7 +17,7 @@ import { isRestoreToInput, } from "@/common/types/ipc"; import { MapStore } from "./MapStore"; -import { getUsageHistory } from "@/common/utils/tokens/displayUsage"; +import { accUsageHistory } from "@/common/utils/tokens/displayUsage"; import { WorkspaceConsumerManager } from "./WorkspaceConsumerManager"; import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; import type { TokenConsumer } from "@/common/types/chatStats"; @@ -432,7 +432,7 @@ export class WorkspaceStore { const messages = aggregator.getAllMessages(); const model = aggregator.getCurrentModel(); - const usageHistory = getUsageHistory(messages, model); + const usageHistory = accUsageHistory(messages, 
model); // Calculate total from usage history (now includes historical) const totalTokens = usageHistory.reduce( diff --git a/src/common/utils/tokens/displayUsage.ts b/src/common/utils/tokens/displayUsage.ts index 95d515f5e..9fe013feb 100644 --- a/src/common/utils/tokens/displayUsage.ts +++ b/src/common/utils/tokens/displayUsage.ts @@ -92,7 +92,7 @@ export function createDisplayUsage( }; } -export function getUsageHistory( +export function accUsageHistory( messages: MuxMessage[], fallbackModel?: string ): ChatUsageDisplay[] { @@ -112,7 +112,6 @@ export function getUsageHistory( if (msg.metadata?.usage) { // Use the model from this specific message (not global) const model = msg.metadata.model ?? fallbackModel ?? "unknown"; - const usage = createDisplayUsage(msg.metadata.usage, model, msg.metadata.providerMetadata); if (usage) { diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index a5f53d2f5..ffc1f2f11 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -26,9 +26,8 @@ import { createRuntime } from "@/node/runtime/runtimeFactory"; import { MessageQueue } from "./messageQueue"; import type { StreamEndEvent, StreamAbortEvent } from "@/common/types/stream"; import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; -import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; -import { createDisplayUsage } from "@/common/utils/tokens/tokenStatsCalculator"; import type { LanguageModelV2Usage } from "@ai-sdk/provider"; +import { accUsageHistory } from "@/common/utils/tokens/displayUsage"; export interface AgentSessionChatEvent { workspaceId: string; @@ -702,36 +701,7 @@ export class AgentSession { return Err(`Failed to get history for usage calculation: ${historyResult.error}`); } - // Calculate cumulative historical usage from all messages - const usageHistory: ChatUsageDisplay[] = []; - let cumulativeHistorical: ChatUsageDisplay | undefined; - - for (const msg of historyResult.data) { - if (msg.role === "assistant") { - // Accumulate historical usage from previous compactions - if (msg.metadata?.historicalUsage) { - cumulativeHistorical = msg.metadata.historicalUsage; - } - - // Add current message's usage - if (msg.metadata?.usage && msg.metadata.model) { - const displayUsage = createDisplayUsage( - msg.metadata.usage, - msg.metadata.model, - msg.metadata.providerMetadata - ); - if (displayUsage) { - usageHistory.push(displayUsage); - } - } - } - } - - // If we have historical usage from a compaction, prepend it to history - // This ensures costs from pre-compaction messages are included in totals - if (cumulativeHistorical) { - usageHistory.unshift(cumulativeHistorical); - } + const usageHistory = accUsageHistory(historyResult.data, undefined); const historicalUsage = usageHistory.length > 0 ? sumUsageHistory(usageHistory) : undefined; From 23b1b32be5a77f26bfb3a6345ed89198be70c29c Mon Sep 17 00:00:00 2001 From: ethan Date: Wed, 19 Nov 2025 13:21:23 +1100 Subject: [PATCH 04/13] =?UTF-8?q?=F0=9F=A4=96=20refactor:=20extract=20comp?= =?UTF-8?q?action=20logic=20to=20separate=20handler?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move compaction handling from agentSession to dedicated CompactionHandler class. 
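Roughly, the new wiring looks like this (condensed from the diff below — names and signatures are taken from this patch; error handling, dedupe, and the abort path are omitted):

```ts
// AgentSession constructor: hand the handler the same history service and emitter the session uses.
this.compactionHandler = new CompactionHandler({
  workspaceId: this.workspaceId,
  historyService: this.historyService,
  emitter: this.emitter,
});

// Stream listeners delegate first; the raw event is only forwarded if compaction didn't handle it.
forward("stream-end", async (payload) => {
  const handled = await this.compactionHandler.handleCompletion(payload as StreamEndEvent);
  if (!handled) this.emitChatEvent(payload);
  this.sendQueuedMessages();
});
```
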
Changes: - Created src/node/services/compactionHandler.ts with CompactionHandler class - Extracted handleAbort, handleCompletion, and performCompaction methods - Updated agentSession.ts to delegate to CompactionHandler - Moved frontend compaction handler to browser utils - Reduced agentSession.ts from 764 to 589 lines Benefits: - Better separation of concerns - Easier to test compaction logic independently - Cleaner session orchestration code _Generated with `mux`_ --- src/browser/hooks/useAIViewKeybinds.ts | 2 +- src/browser/stores/WorkspaceStore.ts | 4 +- .../utils/compaction/handler.ts | 0 src/common/utils/tokens/displayUsage.ts | 2 +- src/node/services/agentSession.ts | 209 +--------------- src/node/services/compactionHandler.ts | 232 ++++++++++++++++++ 6 files changed, 246 insertions(+), 203 deletions(-) rename src/{common => browser}/utils/compaction/handler.ts (100%) create mode 100644 src/node/services/compactionHandler.ts diff --git a/src/browser/hooks/useAIViewKeybinds.ts b/src/browser/hooks/useAIViewKeybinds.ts index 4032379c3..47e86381e 100644 --- a/src/browser/hooks/useAIViewKeybinds.ts +++ b/src/browser/hooks/useAIViewKeybinds.ts @@ -8,7 +8,7 @@ import { DEFAULT_THINKING_LEVEL } from "@/common/types/thinking"; import { getThinkingPolicyForModel } from "@/browser/utils/thinking/policy"; import { getDefaultModelFromLRU } from "@/browser/hooks/useModelLRU"; import type { StreamingMessageAggregator } from "@/browser/utils/messages/StreamingMessageAggregator"; -import { isCompactingStream, cancelCompaction } from "@/common/utils/compaction/handler"; +import { isCompactingStream, cancelCompaction } from "@/browser/utils/compaction/handler"; interface UseAIViewKeybindsParams { workspaceId: string; diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index eeb345e69..ce447a3cf 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -17,7 +17,7 @@ import { isRestoreToInput, } from "@/common/types/ipc"; import { MapStore } from "./MapStore"; -import { accUsageHistory } from "@/common/utils/tokens/displayUsage"; +import { cumUsageHistory } from "@/common/utils/tokens/displayUsage"; import { WorkspaceConsumerManager } from "./WorkspaceConsumerManager"; import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; import type { TokenConsumer } from "@/common/types/chatStats"; @@ -432,7 +432,7 @@ export class WorkspaceStore { const messages = aggregator.getAllMessages(); const model = aggregator.getCurrentModel(); - const usageHistory = accUsageHistory(messages, model); + const usageHistory = cumUsageHistory(messages, model); // Calculate total from usage history (now includes historical) const totalTokens = usageHistory.reduce( diff --git a/src/common/utils/compaction/handler.ts b/src/browser/utils/compaction/handler.ts similarity index 100% rename from src/common/utils/compaction/handler.ts rename to src/browser/utils/compaction/handler.ts diff --git a/src/common/utils/tokens/displayUsage.ts b/src/common/utils/tokens/displayUsage.ts index 9fe013feb..937699b28 100644 --- a/src/common/utils/tokens/displayUsage.ts +++ b/src/common/utils/tokens/displayUsage.ts @@ -92,7 +92,7 @@ export function createDisplayUsage( }; } -export function accUsageHistory( +export function cumUsageHistory( messages: MuxMessage[], fallbackModel?: string ): ChatUsageDisplay[] { diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index ffc1f2f11..adbe96ee3 100644 --- 
a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -15,7 +15,6 @@ import type { StreamErrorMessage, SendMessageOptions, ImagePart, - DeleteMessage, } from "@/common/types/ipc"; import type { SendMessageError } from "@/common/types/errors"; import { createUnknownSendMessageError } from "@/node/services/utils/sendMessageError"; @@ -25,9 +24,7 @@ import { enforceThinkingPolicy } from "@/browser/utils/thinking/policy"; import { createRuntime } from "@/node/runtime/runtimeFactory"; import { MessageQueue } from "./messageQueue"; import type { StreamEndEvent, StreamAbortEvent } from "@/common/types/stream"; -import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; -import type { LanguageModelV2Usage } from "@ai-sdk/provider"; -import { accUsageHistory } from "@/common/utils/tokens/displayUsage"; +import { CompactionHandler } from "./compactionHandler"; export interface AgentSessionChatEvent { workspaceId: string; @@ -62,7 +59,7 @@ export class AgentSession { []; private disposed = false; private readonly messageQueue = new MessageQueue(); - private readonly processedCompactionRequestIds = new Set(); + private readonly compactionHandler: CompactionHandler; constructor(options: AgentSessionOptions) { assert(options, "AgentSession requires options"); @@ -80,6 +77,12 @@ export class AgentSession { this.aiService = aiService; this.initStateManager = initStateManager; + this.compactionHandler = new CompactionHandler({ + workspaceId: this.workspaceId, + historyService: this.historyService, + emitter: this.emitter, + }); + this.attachAiListeners(); this.attachInitListeners(); } @@ -435,7 +438,7 @@ export class AgentSession { forward("reasoning-end", (payload) => this.emitChatEvent(payload)); forward("stream-end", async (payload) => { - const handled = await this.handleCompactionCompletion(payload as StreamEndEvent); + const handled = await this.compactionHandler.handleCompletion(payload as StreamEndEvent); if (!handled) { this.emitChatEvent(payload); } @@ -444,7 +447,7 @@ export class AgentSession { }); forward("stream-abort", async (payload) => { - const handled = await this.handleCompactionAbort(payload as StreamAbortEvent); + const handled = await this.compactionHandler.handleAbort(payload as StreamAbortEvent); if (!handled) { this.emitChatEvent(payload); } @@ -569,196 +572,4 @@ export class AgentSession { private assertNotDisposed(operation: string): void { assert(!this.disposed, `AgentSession.${operation} called after dispose`); } - - /** - * Handle compaction stream abort (Ctrl+C cancel or Ctrl+A accept early) - * - * Two flows: - * - Ctrl+C: abandonPartial=true → skip compaction - * - Ctrl+A: abandonPartial=false/undefined → perform compaction with [truncated] - */ - private async handleCompactionAbort(event: StreamAbortEvent): Promise { - // Check if the last user message is a compaction-request - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return false; - } - - const messages = historyResult.data; - const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); - const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; - - if (!isCompaction || !lastUserMsg) { - return false; - } - - // Ctrl+C flow: abandonPartial=true means user cancelled, skip compaction - if (event.abandonPartial === true) { - return false; - } - - // Ctrl+A flow: Accept early with [truncated] sentinel - // Get the truncated message from historyResult.data - const 
lastMessage = messages[messages.length - 1]; - if (!lastMessage || lastMessage.role !== "assistant") { - console.warn("[AgentSession] Compaction aborted but last message is not assistant"); - return false; - } - - const partialSummary = lastMessage.parts - .filter((part): part is { type: "text"; text: string } => part.type === "text") - .map((part) => part.text) - .join(""); - - // Append [truncated] sentinel - const truncatedSummary = partialSummary.trim() + "\n\n[truncated]"; - - // Perform compaction with truncated summary - const result = await this.performCompaction(truncatedSummary, { - model: lastMessage.metadata?.model ?? "unknown", - usage: event.metadata?.usage, - duration: event.metadata?.duration, - providerMetadata: lastMessage.metadata?.providerMetadata, - systemMessageTokens: lastMessage.metadata?.systemMessageTokens, - }); - if (!result.success) { - console.error("[AgentSession] Early compaction failed:", result.error); - return false; - } - - this.emitChatEvent(event); - return true; - } - - /** - * Handle compaction stream completion - * - * Detects when a compaction stream finishes, extracts the summary, - * and performs history replacement atomically. - */ - private async handleCompactionCompletion(event: StreamEndEvent): Promise { - // Check if the last user message is a compaction-request - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return false; - } - - const messages = historyResult.data; - const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); - const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; - - if (!isCompaction || !lastUserMsg) { - return false; - } - - // Dedupe: If we've already processed this compaction-request, skip - if (this.processedCompactionRequestIds.has(lastUserMsg.id)) { - return true; - } - - const summary = event.parts - .filter((part): part is { type: "text"; text: string } => part.type === "text") - .map((part) => part.text) - .join(""); - - // Mark as processed before performing compaction - this.processedCompactionRequestIds.add(lastUserMsg.id); - - const result = await this.performCompaction(summary, event.metadata); - if (!result.success) { - console.error("[AgentSession] Compaction failed:", result.error); - return false; - } - - // Emit stream-end to frontend so UI knows compaction is complete - this.emitCompactionStreamEnd(event); - return true; - } - - /** - * Perform history compaction by replacing all messages with a summary - * - * Steps: - * 1. Calculate cumulative usage from all messages (for historicalUsage field) - * 2. Clear entire history and get deleted sequence numbers - * 3. Append summary message with metadata - * 4. Emit delete event for old messages - * 5. Emit summary message to frontend - */ - private async performCompaction( - summary: string, - metadata: { - model: string; - usage?: LanguageModelV2Usage; - duration?: number; - providerMetadata?: Record; - systemMessageTokens?: number; - } - ): Promise> { - // Get all messages to calculate cumulative usage - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return Err(`Failed to get history for usage calculation: ${historyResult.error}`); - } - - const usageHistory = accUsageHistory(historyResult.data, undefined); - - const historicalUsage = usageHistory.length > 0 ? 
sumUsageHistory(usageHistory) : undefined; - - // Clear entire history and get deleted sequences - const clearResult = await this.historyService.clearHistory(this.workspaceId); - if (!clearResult.success) { - return Err(`Failed to clear history: ${clearResult.error}`); - } - const deletedSequences = clearResult.data; - - // Create summary message with metadata - const summaryMessage = createMuxMessage( - `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, - "assistant", - summary, - { - timestamp: Date.now(), - compacted: true, - model: metadata.model, - usage: metadata.usage, - historicalUsage, - providerMetadata: metadata.providerMetadata, - duration: metadata.duration, - systemMessageTokens: metadata.systemMessageTokens, - muxMetadata: { type: "normal" }, - } - ); - - // Append summary to history - const appendResult = await this.historyService.appendToHistory( - this.workspaceId, - summaryMessage - ); - if (!appendResult.success) { - return Err(`Failed to append summary: ${appendResult.error}`); - } - - // Emit delete event for old messages - if (deletedSequences.length > 0) { - const deleteMessage: DeleteMessage = { - type: "delete", - historySequences: deletedSequences, - }; - this.emitChatEvent(deleteMessage); - } - - // Emit summary message to frontend - this.emitChatEvent(summaryMessage); - - return Ok(undefined); - } - - /** - * Emit stream-end event after compaction completes - * This notifies the frontend that the stream is done - */ - private emitCompactionStreamEnd(event: StreamEndEvent): void { - this.emitChatEvent(event); - } } diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts new file mode 100644 index 000000000..d61059236 --- /dev/null +++ b/src/node/services/compactionHandler.ts @@ -0,0 +1,232 @@ +import type { EventEmitter } from "events"; +import type { HistoryService } from "./historyService"; +import type { StreamEndEvent, StreamAbortEvent } from "@/common/types/stream"; +import type { WorkspaceChatMessage, DeleteMessage } from "@/common/types/ipc"; +import type { Result } from "@/common/types/result"; +import { Ok, Err } from "@/common/types/result"; +import type { LanguageModelV2Usage } from "@ai-sdk/provider"; +import { cumUsageHistory } from "@/common/utils/tokens/displayUsage"; +import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; +import { createMuxMessage } from "@/common/types/message"; + +interface CompactionHandlerOptions { + workspaceId: string; + historyService: HistoryService; + emitter: EventEmitter; +} + +/** + * Handles history compaction for agent sessions + * + * Responsible for: + * - Detecting compaction requests in stream events + * - Handling Ctrl+C (cancel) and Ctrl+A (accept early) flows + * - Replacing chat history with compacted summaries + * - Preserving cumulative usage across compactions + */ +export class CompactionHandler { + private readonly workspaceId: string; + private readonly historyService: HistoryService; + private readonly emitter: EventEmitter; + private readonly processedIds: Set = new Set(); + + constructor(options: CompactionHandlerOptions) { + this.workspaceId = options.workspaceId; + this.historyService = options.historyService; + this.emitter = options.emitter; + } + + /** + * Handle compaction stream abort (Ctrl+C cancel or Ctrl+A accept early) + * + * Two flows: + * - Ctrl+C: abandonPartial=true → skip compaction + * - Ctrl+A: abandonPartial=false/undefined → perform compaction with [truncated] + */ + async handleAbort(event: 
StreamAbortEvent): Promise { + // Check if the last user message is a compaction-request + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return false; + } + + const messages = historyResult.data; + const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); + const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; + + if (!isCompaction || !lastUserMsg) { + return false; + } + + // Ctrl+C flow: abandonPartial=true means user cancelled, skip compaction + if (event.abandonPartial === true) { + return false; + } + + // Ctrl+A flow: Accept early with [truncated] sentinel + // Get the truncated message from historyResult.data + const lastMessage = messages[messages.length - 1]; + if (!lastMessage || lastMessage.role !== "assistant") { + console.warn("[CompactionHandler] Compaction aborted but last message is not assistant"); + return false; + } + + const partialSummary = lastMessage.parts + .filter((part): part is { type: "text"; text: string } => part.type === "text") + .map((part) => part.text) + .join(""); + + // Append [truncated] sentinel + const truncatedSummary = partialSummary.trim() + "\n\n[truncated]"; + + // Perform compaction with truncated summary + const result = await this.performCompaction(truncatedSummary, { + model: lastMessage.metadata?.model ?? "unknown", + usage: event.metadata?.usage, + duration: event.metadata?.duration, + providerMetadata: lastMessage.metadata?.providerMetadata, + systemMessageTokens: lastMessage.metadata?.systemMessageTokens, + }); + if (!result.success) { + console.error("[CompactionHandler] Early compaction failed:", result.error); + return false; + } + + this.emitChatEvent(event); + return true; + } + + /** + * Handle compaction stream completion + * + * Detects when a compaction stream finishes, extracts the summary, + * and performs history replacement atomically. + */ + async handleCompletion(event: StreamEndEvent): Promise { + // Check if the last user message is a compaction-request + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return false; + } + + const messages = historyResult.data; + const lastUserMsg = [...messages].reverse().find((m) => m.role === "user"); + const isCompaction = lastUserMsg?.metadata?.muxMetadata?.type === "compaction-request"; + + if (!isCompaction || !lastUserMsg) { + return false; + } + + // Dedupe: If we've already processed this compaction-request, skip + if (this.processedIds.has(lastUserMsg.id)) { + return true; + } + + const summary = event.parts + .filter((part): part is { type: "text"; text: string } => part.type === "text") + .map((part) => part.text) + .join(""); + + // Mark as processed before performing compaction + this.processedIds.add(lastUserMsg.id); + + const result = await this.performCompaction(summary, event.metadata); + if (!result.success) { + console.error("[CompactionHandler] Compaction failed:", result.error); + return false; + } + + // Emit stream-end to frontend so UI knows compaction is complete + this.emitChatEvent(event); + return true; + } + + /** + * Perform history compaction by replacing all messages with a summary + * + * Steps: + * 1. Calculate cumulative usage from all messages (for historicalUsage field) + * 2. Clear entire history and get deleted sequence numbers + * 3. Append summary message with metadata + * 4. Emit delete event for old messages + * 5. 
Emit summary message to frontend + */ + private async performCompaction( + summary: string, + metadata: { + model: string; + usage?: LanguageModelV2Usage; + duration?: number; + providerMetadata?: Record; + systemMessageTokens?: number; + } + ): Promise> { + // Get all messages to calculate cumulative usage + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return Err(`Failed to get history for usage calculation: ${historyResult.error}`); + } + + const usageHistory = cumUsageHistory(historyResult.data, undefined); + + const historicalUsage = usageHistory.length > 0 ? sumUsageHistory(usageHistory) : undefined; + + // Clear entire history and get deleted sequences + const clearResult = await this.historyService.clearHistory(this.workspaceId); + if (!clearResult.success) { + return Err(`Failed to clear history: ${clearResult.error}`); + } + const deletedSequences = clearResult.data; + + // Create summary message with metadata + const summaryMessage = createMuxMessage( + `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, + "assistant", + summary, + { + timestamp: Date.now(), + compacted: true, + model: metadata.model, + usage: metadata.usage, + historicalUsage, + providerMetadata: metadata.providerMetadata, + duration: metadata.duration, + systemMessageTokens: metadata.systemMessageTokens, + muxMetadata: { type: "normal" }, + } + ); + + // Append summary to history + const appendResult = await this.historyService.appendToHistory( + this.workspaceId, + summaryMessage + ); + if (!appendResult.success) { + return Err(`Failed to append summary: ${appendResult.error}`); + } + + // Emit delete event for old messages + if (deletedSequences.length > 0) { + const deleteMessage: DeleteMessage = { + type: "delete", + historySequences: deletedSequences, + }; + this.emitChatEvent(deleteMessage); + } + + // Emit summary message to frontend + this.emitChatEvent(summaryMessage); + + return Ok(undefined); + } + + /** + * Emit chat event through the session's emitter + */ + private emitChatEvent(message: WorkspaceChatMessage): void { + this.emitter.emit("chat-event", { + workspaceId: this.workspaceId, + message, + }); + } +} From 7b3d1efef3413780804cfcc42914dbee3a0c910f Mon Sep 17 00:00:00 2001 From: ethan Date: Wed, 19 Nov 2025 13:30:09 +1100 Subject: [PATCH 05/13] bump test timeout --- tests/ipcMain/sendMessage.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ipcMain/sendMessage.test.ts b/tests/ipcMain/sendMessage.test.ts index 61d4113d6..040505ca8 100644 --- a/tests/ipcMain/sendMessage.test.ts +++ b/tests/ipcMain/sendMessage.test.ts @@ -1380,7 +1380,7 @@ These are general instructions that apply to all modes. 
// Wait for first stream to complete const collector1 = createEventCollector(env.sentEvents, workspaceId); - await collector1.waitForEvent("stream-end", 30000); + await collector1.waitForEvent("stream-end", 60000); assertStreamSuccess(collector1); // 2) Validate UI/history has a dynamic-tool part with a real diff string From 98e0d05b4f478a33f0032fb84b87a096a140e1ab Mon Sep 17 00:00:00 2001 From: ethan Date: Wed, 19 Nov 2025 13:40:02 +1100 Subject: [PATCH 06/13] rename --- src/node/services/compactionHandler.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts index d61059236..ba94656f5 100644 --- a/src/node/services/compactionHandler.ts +++ b/src/node/services/compactionHandler.ts @@ -28,7 +28,7 @@ export class CompactionHandler { private readonly workspaceId: string; private readonly historyService: HistoryService; private readonly emitter: EventEmitter; - private readonly processedIds: Set = new Set(); + private readonly processedCompactionRequestIds: Set = new Set(); constructor(options: CompactionHandlerOptions) { this.workspaceId = options.workspaceId; @@ -118,7 +118,7 @@ export class CompactionHandler { } // Dedupe: If we've already processed this compaction-request, skip - if (this.processedIds.has(lastUserMsg.id)) { + if (this.processedCompactionRequestIds.has(lastUserMsg.id)) { return true; } @@ -128,7 +128,7 @@ export class CompactionHandler { .join(""); // Mark as processed before performing compaction - this.processedIds.add(lastUserMsg.id); + this.processedCompactionRequestIds.add(lastUserMsg.id); const result = await this.performCompaction(summary, event.metadata); if (!result.success) { From a6bef39af1921394a98e2dd86bc8278586a8b271 Mon Sep 17 00:00:00 2001 From: ethan Date: Wed, 19 Nov 2025 16:23:06 +1100 Subject: [PATCH 07/13] dont fetch messages twice during compaction --- src/node/services/compactionHandler.ts | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts index ba94656f5..5787a65fe 100644 --- a/src/node/services/compactionHandler.ts +++ b/src/node/services/compactionHandler.ts @@ -7,7 +7,7 @@ import { Ok, Err } from "@/common/types/result"; import type { LanguageModelV2Usage } from "@ai-sdk/provider"; import { cumUsageHistory } from "@/common/utils/tokens/displayUsage"; import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; -import { createMuxMessage } from "@/common/types/message"; +import { createMuxMessage, MuxMessage } from "@/common/types/message"; interface CompactionHandlerOptions { workspaceId: string; @@ -80,7 +80,7 @@ export class CompactionHandler { const truncatedSummary = partialSummary.trim() + "\n\n[truncated]"; // Perform compaction with truncated summary - const result = await this.performCompaction(truncatedSummary, { + const result = await this.performCompaction(truncatedSummary, messages, { model: lastMessage.metadata?.model ?? 
"unknown", usage: event.metadata?.usage, duration: event.metadata?.duration, @@ -130,7 +130,7 @@ export class CompactionHandler { // Mark as processed before performing compaction this.processedCompactionRequestIds.add(lastUserMsg.id); - const result = await this.performCompaction(summary, event.metadata); + const result = await this.performCompaction(summary, messages,event.metadata); if (!result.success) { console.error("[CompactionHandler] Compaction failed:", result.error); return false; @@ -153,6 +153,7 @@ export class CompactionHandler { */ private async performCompaction( summary: string, + messages: MuxMessage[], metadata: { model: string; usage?: LanguageModelV2Usage; @@ -161,13 +162,7 @@ export class CompactionHandler { systemMessageTokens?: number; } ): Promise> { - // Get all messages to calculate cumulative usage - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return Err(`Failed to get history for usage calculation: ${historyResult.error}`); - } - - const usageHistory = cumUsageHistory(historyResult.data, undefined); + const usageHistory = cumUsageHistory(messages, undefined); const historicalUsage = usageHistory.length > 0 ? sumUsageHistory(usageHistory) : undefined; From 256e7aabcb570147705b31f6c53db89035b2cb1d Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 13:53:57 +1100 Subject: [PATCH 08/13] fixup --- src/node/services/compactionHandler.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts index 5787a65fe..9e59eb99a 100644 --- a/src/node/services/compactionHandler.ts +++ b/src/node/services/compactionHandler.ts @@ -130,7 +130,7 @@ export class CompactionHandler { // Mark as processed before performing compaction this.processedCompactionRequestIds.add(lastUserMsg.id); - const result = await this.performCompaction(summary, messages,event.metadata); + const result = await this.performCompaction(summary, messages, event.metadata); if (!result.success) { console.error("[CompactionHandler] Compaction failed:", result.error); return false; From 75bea1305e4a96ddb8132d251c99628b78111878 Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 13:55:17 +1100 Subject: [PATCH 09/13] =?UTF-8?q?=F0=9F=A4=96=20feat:=20add=20progressive?= =?UTF-8?q?=20compaction=20warnings?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Show countdown starting at 60% usage (10% before threshold) - Display 'Context left until Auto-Compact: X% remaining' message - Switch to urgent message at 70% threshold - Centralize threshold logic in shouldAutoCompact() utility - Pass image parts through compaction continue messages - Structure for future threshold configurability --- src/browser/components/AIView.tsx | 25 +++- src/browser/components/ChatInput/index.tsx | 123 +++++++++++++----- src/browser/components/ChatInput/types.ts | 2 + src/browser/components/CompactionWarning.tsx | 36 +++++ src/browser/hooks/useResumeManager.ts | 5 +- src/browser/stores/WorkspaceStore.ts | 7 +- src/browser/utils/chatCommands.ts | 23 +++- .../utils/compaction/autoCompactionCheck.ts | 105 +++++++++++++++ .../messages/StreamingMessageAggregator.ts | 5 +- src/common/types/message.ts | 8 +- src/node/services/agentSession.ts | 3 +- src/node/services/compactionHandler.ts | 2 +- 12 files changed, 298 insertions(+), 46 deletions(-) create mode 100644 src/browser/components/CompactionWarning.tsx create mode 100644 
src/browser/utils/compaction/autoCompactionCheck.ts diff --git a/src/browser/components/AIView.tsx b/src/browser/components/AIView.tsx index 6efd7c040..f37f14d7b 100644 --- a/src/browser/components/AIView.tsx +++ b/src/browser/components/AIView.tsx @@ -20,7 +20,11 @@ import { formatKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds"; import { useAutoScroll } from "@/browser/hooks/useAutoScroll"; import { usePersistedState } from "@/browser/hooks/usePersistedState"; import { useThinking } from "@/browser/contexts/ThinkingContext"; -import { useWorkspaceState, useWorkspaceAggregator } from "@/browser/stores/WorkspaceStore"; +import { + useWorkspaceState, + useWorkspaceAggregator, + useWorkspaceUsage, +} from "@/browser/stores/WorkspaceStore"; import { WorkspaceHeader } from "./WorkspaceHeader"; import { getModelName } from "@/common/utils/ai/models"; import type { DisplayedMessage } from "@/common/types/message"; @@ -28,6 +32,9 @@ import type { RuntimeConfig } from "@/common/types/runtime"; import { useAIViewKeybinds } from "@/browser/hooks/useAIViewKeybinds"; import { evictModelFromLRU } from "@/browser/hooks/useModelLRU"; import { QueuedMessage } from "./Messages/QueuedMessage"; +import { CompactionWarning } from "./CompactionWarning"; +import { shouldAutoCompact } from "@/browser/utils/compaction/autoCompactionCheck"; +import { use1MContext } from "@/browser/hooks/use1MContext"; interface AIViewProps { workspaceId: string; @@ -71,6 +78,8 @@ const AIViewInner: React.FC = ({ const workspaceState = useWorkspaceState(workspaceId); const aggregator = useWorkspaceAggregator(workspaceId); + const workspaceUsage = useWorkspaceUsage(workspaceId); + const [use1M] = use1MContext(); const handledModelErrorsRef = useRef>(new Set()); useEffect(() => { @@ -311,6 +320,13 @@ const AIViewInner: React.FC = ({ // Get active stream message ID for token counting const activeStreamMessageId = aggregator.getActiveStreamMessageId(); + const autoCompactionCheck = currentModel + ? shouldAutoCompact(workspaceUsage, currentModel, use1M) + : { shouldShowWarning: false, usagePercentage: 0, thresholdPercentage: 70 }; + + // Show warning when: shouldShowWarning flag is true AND not currently compacting + const shouldShowCompactionWarning = !isCompacting && autoCompactionCheck.shouldShowWarning; + // Note: We intentionally do NOT reset autoRetry when streams start. // If user pressed the interrupt key, autoRetry stays false until they manually retry. // This makes state transitions explicit and predictable. 
@@ -496,6 +512,12 @@ const AIViewInner: React.FC = ({ )} + {shouldShowCompactionWarning && ( + + )} = ({ onEditLastUserMessage={() => void handleEditLastUserMessage()} canInterrupt={canInterrupt} onReady={handleChatInputReady} + autoCompactionCheck={autoCompactionCheck} /> diff --git a/src/browser/components/ChatInput/index.tsx b/src/browser/components/ChatInput/index.tsx index 20bbb1827..4e2e92257 100644 --- a/src/browser/components/ChatInput/index.tsx +++ b/src/browser/components/ChatInput/index.tsx @@ -30,6 +30,7 @@ import { handleCompactCommand, forkWorkspace, prepareCompactionMessage, + executeCompaction, type CommandHandlerContext, } from "@/browser/utils/chatCommands"; import { CUSTOM_EVENTS } from "@/common/constants/events"; @@ -468,6 +469,32 @@ export const ChatInput: React.FC = (props) => { // Workspace variant: full command handling + message send if (variant !== "workspace") return; // Type guard + // Prepare image parts if any + const imageParts = imageAttachments.map((img, index) => { + // Validate before sending to help with debugging + if (!img.url || typeof img.url !== "string") { + console.error( + `Image attachment [${index}] has invalid url:`, + typeof img.url, + img.url?.slice(0, 50) + ); + } + if (!img.url?.startsWith("data:")) { + console.error(`Image attachment [${index}] url is not a data URL:`, img.url?.slice(0, 100)); + } + if (!img.mediaType || typeof img.mediaType !== "string") { + console.error( + `Image attachment [${index}] has invalid mediaType:`, + typeof img.mediaType, + img.mediaType + ); + } + return { + url: img.url, + mediaType: img.mediaType, + }; + }); + try { // Parse command const parsed = parseCommand(messageText); @@ -567,8 +594,10 @@ export const ChatInput: React.FC = (props) => { const context: CommandHandlerContext = { workspaceId: props.workspaceId, sendMessageOptions, + imageParts, editMessageId: editingMessage?.id, setInput, + setImageAttachments, setIsSending, setToast, onCancelEdit: props.onCancelEdit, @@ -632,7 +661,9 @@ export const ChatInput: React.FC = (props) => { const context: CommandHandlerContext = { workspaceId: props.workspaceId, sendMessageOptions, + imageParts: undefined, // /new doesn't use images setInput, + setImageAttachments, setIsSending, setToast, }; @@ -652,42 +683,70 @@ export const ChatInput: React.FC = (props) => { } } - // Regular message - send directly via API - setIsSending(true); - // Save current state for restoration on error const previousImageAttachments = [...imageAttachments]; - try { - // Prepare image parts if any - const imageParts = imageAttachments.map((img, index) => { - // Validate before sending to help with debugging - if (!img.url || typeof img.url !== "string") { - console.error( - `Image attachment [${index}] has invalid url:`, - typeof img.url, - img.url?.slice(0, 50) - ); - } - if (!img.url?.startsWith("data:")) { - console.error( - `Image attachment [${index}] url is not a data URL:`, - img.url?.slice(0, 100) - ); - } - if (!img.mediaType || typeof img.mediaType !== "string") { - console.error( - `Image attachment [${index}] has invalid mediaType:`, - typeof img.mediaType, - img.mediaType - ); + // Auto-compaction check (workspace variant only) + // Check if we should auto-compact before sending this message + // Result is computed in parent (AIView) and passed down to avoid duplicate calculation + const shouldAutoCompact = + props.autoCompactionCheck && + props.autoCompactionCheck.usagePercentage >= props.autoCompactionCheck.thresholdPercentage; + if (variant === "workspace" && 
!editingMessage && shouldAutoCompact) { + // Clear input immediately for responsive UX + setInput(""); + setImageAttachments([]); + setIsSending(true); + + try { + const result = await executeCompaction({ + workspaceId: props.workspaceId, + continueMessage: { + text: messageText, + imageParts, + }, + sendMessageOptions, + }); + + if (!result.success) { + // Restore on error + setInput(messageText); + setImageAttachments(previousImageAttachments); + setToast({ + id: Date.now().toString(), + type: "error", + title: "Auto-Compaction Failed", + message: result.error ?? "Failed to start auto-compaction", + }); + } else { + setToast({ + id: Date.now().toString(), + type: "success", + message: `Context threshold reached - auto-compacting...`, + }); } - return { - url: img.url, - mediaType: img.mediaType, - }; - }); + } catch (error) { + // Restore on unexpected error + setInput(messageText); + setImageAttachments(previousImageAttachments); + setToast({ + id: Date.now().toString(), + type: "error", + title: "Auto-Compaction Failed", + message: + error instanceof Error ? error.message : "Unexpected error during auto-compaction", + }); + } finally { + setIsSending(false); + } + return; // Skip normal send + } + + // Regular message - send directly via API + setIsSending(true); + + try { // When editing a /compact command, regenerate the actual summarization request let actualMessageText = messageText; let muxMetadata: MuxFrontendMetadata | undefined; @@ -703,7 +762,7 @@ export const ChatInput: React.FC = (props) => { } = prepareCompactionMessage({ workspaceId: props.workspaceId, maxOutputTokens: parsed.maxOutputTokens, - continueMessage: parsed.continueMessage, + continueMessage: { text: parsed.continueMessage ?? "", imageParts }, model: parsed.model, sendMessageOptions, }); diff --git a/src/browser/components/ChatInput/types.ts b/src/browser/components/ChatInput/types.ts index 25f7979c9..324c6e12d 100644 --- a/src/browser/components/ChatInput/types.ts +++ b/src/browser/components/ChatInput/types.ts @@ -1,5 +1,6 @@ import type { ImagePart } from "@/common/types/ipc"; import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; +import type { AutoCompactionCheckResult } from "@/browser/utils/compaction/autoCompactionCheck"; export interface ChatInputAPI { focus: () => void; @@ -23,6 +24,7 @@ export interface ChatInputWorkspaceVariant { canInterrupt?: boolean; disabled?: boolean; onReady?: (api: ChatInputAPI) => void; + autoCompactionCheck?: AutoCompactionCheckResult; // Computed in parent (AIView) to avoid duplicate calculation } // Creation variant: simplified for first message / workspace creation diff --git a/src/browser/components/CompactionWarning.tsx b/src/browser/components/CompactionWarning.tsx new file mode 100644 index 000000000..7170917d9 --- /dev/null +++ b/src/browser/components/CompactionWarning.tsx @@ -0,0 +1,36 @@ +import React from "react"; + +/** + * Warning banner shown when context usage is approaching the compaction threshold. + * + * Displays progressive warnings: + * - Below threshold: "Context left until Auto-Compact: X% remaining" (where X = threshold - current) + * - At/above threshold: "Approaching context limit. Next message will trigger auto-compaction." 
+ * + * Displayed above ChatInput when: + * - Token usage >= (threshold - 10%) of model's context window + * - Not currently compacting (user can still send messages) + * + * @param usagePercentage - Current token usage as percentage (0-100) + * @param thresholdPercentage - Auto-compaction trigger threshold (0-100, default 70) + */ +export const CompactionWarning: React.FC<{ + usagePercentage: number; + thresholdPercentage: number; +}> = (props) => { + // At threshold or above, next message will trigger compaction + const willCompactNext = props.usagePercentage >= props.thresholdPercentage; + + // Calculate remaining percentage until threshold + const remaining = props.thresholdPercentage - props.usagePercentage; + + const message = willCompactNext + ? "⚠️ Approaching context limit. Next message will trigger auto-compaction." + : `Context left until Auto-Compact: ${Math.round(remaining)}%`; + + return ( +
+ {message} +
+ ); +}; diff --git a/src/browser/hooks/useResumeManager.ts b/src/browser/hooks/useResumeManager.ts index 507ab7523..afe5a0fcb 100644 --- a/src/browser/hooks/useResumeManager.ts +++ b/src/browser/hooks/useResumeManager.ts @@ -171,7 +171,10 @@ export function useResumeManager() { if (lastUserMsg?.compactionRequest) { // Apply compaction overrides using shared function (same as ChatInput) // This ensures custom model/tokens are preserved across resume - options = applyCompactionOverrides(options, lastUserMsg.compactionRequest.parsed); + options = applyCompactionOverrides(options, { + maxOutputTokens: lastUserMsg.compactionRequest.parsed.maxOutputTokens, + continueMessage: { text: lastUserMsg.compactionRequest.parsed.continueMessage ?? "" }, + }); } } diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index ce447a3cf..78c20189f 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -424,11 +424,14 @@ export class WorkspaceStore { * Extract usage from messages (no tokenization). * Each usage entry calculated with its own model for accurate costs. * - * REQUIRES: Workspace must have been added via addWorkspace() first. + * Returns empty state if workspace doesn't exist (e.g., creation mode). */ getWorkspaceUsage(workspaceId: string): WorkspaceUsageState { return this.usageStore.get(workspaceId, () => { - const aggregator = this.assertGet(workspaceId); + const aggregator = this.aggregators.get(workspaceId); + if (!aggregator) { + return { usageHistory: [], totalTokens: 0 }; + } const messages = aggregator.getAllMessages(); const model = aggregator.getCurrentModel(); diff --git a/src/browser/utils/chatCommands.ts b/src/browser/utils/chatCommands.ts index 39f63800b..499d9aec1 100644 --- a/src/browser/utils/chatCommands.ts +++ b/src/browser/utils/chatCommands.ts @@ -6,8 +6,12 @@ * to ensure consistent behavior and avoid duplication. */ -import type { SendMessageOptions } from "@/common/types/ipc"; -import type { MuxFrontendMetadata, CompactionRequestData } from "@/common/types/message"; +import type { SendMessageOptions, ImagePart } from "@/common/types/ipc"; +import type { + MuxFrontendMetadata, + CompactionRequestData, + ContinueMessage, +} from "@/common/types/message"; import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; import type { RuntimeConfig } from "@/common/types/runtime"; import { RUNTIME_MODE, SSH_RUNTIME_PREFIX } from "@/common/types/runtime"; @@ -17,6 +21,7 @@ import type { ParsedCommand } from "@/browser/utils/slashCommands/types"; import { applyCompactionOverrides } from "@/browser/utils/messages/compactionOptions"; import { resolveCompactionModel } from "@/browser/utils/messages/compactionModelPreference"; import { getRuntimeKey } from "@/common/constants/storage"; +import { ImageAttachment } from "../components/ImageAttachments"; // ============================================================================ // Workspace Creation @@ -177,7 +182,7 @@ export { forkWorkspace } from "./workspaceFork"; export interface CompactionOptions { workspaceId: string; maxOutputTokens?: number; - continueMessage?: string; + continueMessage?: ContinueMessage; model?: string; sendMessageOptions: SendMessageOptions; editMessageId?: string; @@ -203,7 +208,7 @@ export function prepareCompactionMessage(options: CompactionOptions): { let messageText = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. 
Use approximately ${targetWords} words.`; if (options.continueMessage) { - messageText += `\n\nThe user wants to continue with: ${options.continueMessage}`; + messageText += `\n\nThe user wants to continue with: ${options.continueMessage.text}`; } // Handle model preference (sticky globally) @@ -267,7 +272,7 @@ function formatCompactionCommand(options: CompactionOptions): string { cmd += ` -m ${options.model}`; } if (options.continueMessage) { - cmd += `\n${options.continueMessage}`; + cmd += `\n${options.continueMessage.text}`; } return cmd; } @@ -279,8 +284,10 @@ function formatCompactionCommand(options: CompactionOptions): string { export interface CommandHandlerContext { workspaceId: string; sendMessageOptions: SendMessageOptions; + imageParts?: ImagePart[]; editMessageId?: string; setInput: (value: string) => void; + setImageAttachments: (images: ImageAttachment[]) => void; setIsSending: (value: boolean) => void; setToast: (toast: Toast) => void; onCancelEdit?: () => void; @@ -394,19 +401,23 @@ export async function handleCompactCommand( sendMessageOptions, editMessageId, setInput, + setImageAttachments, setIsSending, setToast, onCancelEdit, } = context; setInput(""); + setImageAttachments([]); setIsSending(true); try { const result = await executeCompaction({ workspaceId, maxOutputTokens: parsed.maxOutputTokens, - continueMessage: parsed.continueMessage, + continueMessage: parsed.continueMessage + ? { text: parsed.continueMessage, imageParts: context.imageParts } + : undefined, model: parsed.model, sendMessageOptions, editMessageId, diff --git a/src/browser/utils/compaction/autoCompactionCheck.ts b/src/browser/utils/compaction/autoCompactionCheck.ts new file mode 100644 index 000000000..b5c84cb80 --- /dev/null +++ b/src/browser/utils/compaction/autoCompactionCheck.ts @@ -0,0 +1,105 @@ +/** + * Auto-compaction threshold checking + * + * Determines whether auto-compaction should trigger based on current token usage + * as a percentage of the model's context window. 
+ * + * Auto-compaction triggers when: + * - Usage data is available (has at least one API response) + * - Model has known max_input_tokens + * - Usage exceeds threshold (default 70%) + * + * Safe defaults: + * - Returns false if no usage data (first message) + * - Returns false if model stats unavailable (unknown model) + * - Never triggers in edit mode (caller's responsibility to check) + */ + +import type { WorkspaceUsageState } from "@/browser/stores/WorkspaceStore"; +import { getModelStats } from "@/common/utils/tokens/modelStats"; +import { supports1MContext } from "@/common/utils/ai/models"; + +export interface AutoCompactionCheckResult { + shouldShowWarning: boolean; + usagePercentage: number; + thresholdPercentage: number; +} + +// Auto-compaction threshold (0.7 = 70%) +// TODO: Make this configurable via settings +const AUTO_COMPACTION_THRESHOLD = 0.7; + +// Show warning this many percentage points before threshold +const WARNING_ADVANCE_PERCENT = 10; + +/** + * Check if auto-compaction should trigger based on token usage + * + * @param usage - Current workspace usage state (from useWorkspaceUsage) + * @param model - Current model string + * @param use1M - Whether 1M context is enabled + * @param threshold - Usage percentage threshold (0.0-1.0, default 0.7 = 70%) + * @param warningAdvancePercent - Show warning this many percentage points before threshold (default 10) + * @returns Check result with shouldAutoCompact flag, warning flag, and usage details + */ +export function shouldAutoCompact( + usage: WorkspaceUsageState | undefined, + model: string, + use1M: boolean, + threshold: number = AUTO_COMPACTION_THRESHOLD, + warningAdvancePercent: number = WARNING_ADVANCE_PERCENT +): AutoCompactionCheckResult { + const thresholdPercentage = threshold * 100; + + // No usage data yet - safe default (don't trigger on first message) + if (!usage || usage.usageHistory.length === 0) { + return { + shouldShowWarning: false, + usagePercentage: 0, + thresholdPercentage, + }; + } + + // Get last usage (most recent API response) + const lastUsage = usage.usageHistory[usage.usageHistory.length - 1]; + if (!lastUsage) { + return { + shouldShowWarning: false, + usagePercentage: 0, + thresholdPercentage, + }; + } + + // Determine max tokens for this model + const modelStats = getModelStats(model); + const maxTokens = use1M && supports1MContext(model) ? 
1_000_000 : modelStats?.max_input_tokens; + + // No max tokens known - safe default (can't calculate percentage) + if (!maxTokens) { + return { + shouldShowWarning: false, + usagePercentage: 0, + thresholdPercentage, + }; + } + + // Calculate total tokens used in last request + const totalUsed = + lastUsage.input.tokens + + lastUsage.cached.tokens + + lastUsage.cacheCreate.tokens + + lastUsage.output.tokens + + lastUsage.reasoning.tokens; + + // Calculate usage percentage + const usagePercentage = (totalUsed / maxTokens) * 100; + + // Show warning if within advance window (e.g., 60% for 70% threshold with 10% advance) + const shouldShowWarning = usagePercentage >= thresholdPercentage - warningAdvancePercent; + + return { + shouldShowWarning, + usagePercentage, + thresholdPercentage, + }; +} diff --git a/src/browser/utils/messages/StreamingMessageAggregator.ts b/src/browser/utils/messages/StreamingMessageAggregator.ts index e0d1193e1..269155da1 100644 --- a/src/browser/utils/messages/StreamingMessageAggregator.ts +++ b/src/browser/utils/messages/StreamingMessageAggregator.ts @@ -762,7 +762,10 @@ export class StreamingMessageAggregator { muxMeta?.type === "compaction-request" ? { rawCommand: muxMeta.rawCommand, - parsed: muxMeta.parsed, + parsed: { + maxOutputTokens: muxMeta.parsed.maxOutputTokens, + continueMessage: muxMeta.parsed.continueMessage?.text, // Extract text for display + }, } : undefined; diff --git a/src/common/types/message.ts b/src/common/types/message.ts index 0d88b52d4..6e79594ea 100644 --- a/src/common/types/message.ts +++ b/src/common/types/message.ts @@ -5,11 +5,17 @@ import type { ToolPolicy } from "@/common/utils/tools/toolPolicy"; import type { ChatUsageDisplay } from "@/common/utils/tokens/usageAggregator"; import type { ImagePart } from "./ipc"; +// Message to continue with after compaction +export interface ContinueMessage { + text: string; + imageParts?: ImagePart[]; +} + // Parsed compaction request data (shared type for consistency) export interface CompactionRequestData { model?: string; // Custom model override for compaction maxOutputTokens?: number; - continueMessage?: string; + continueMessage?: ContinueMessage; } // Frontend-specific metadata stored in muxMetadata field diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index adbe96ee3..4b61503ac 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -326,9 +326,10 @@ export class AgentSession { // If this is a compaction request with a continue message, queue it for auto-send after compaction const muxMeta = options?.muxMetadata; if (muxMeta?.type === "compaction-request" && muxMeta.parsed.continueMessage && options) { + const { text, imageParts } = muxMeta.parsed.continueMessage; // Strip out edit-specific and compaction-specific fields so the queued message is a fresh user message const { muxMetadata, mode, editMessageId, ...continueOptions } = options; - this.messageQueue.add(muxMeta.parsed.continueMessage, continueOptions); + this.messageQueue.add(text, { ...continueOptions, imageParts }); this.emitQueuedMessageChanged(); } diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts index 9e59eb99a..02e6a6b79 100644 --- a/src/node/services/compactionHandler.ts +++ b/src/node/services/compactionHandler.ts @@ -7,7 +7,7 @@ import { Ok, Err } from "@/common/types/result"; import type { LanguageModelV2Usage } from "@ai-sdk/provider"; import { cumUsageHistory } from "@/common/utils/tokens/displayUsage"; 
import { sumUsageHistory } from "@/common/utils/tokens/usageAggregator"; -import { createMuxMessage, MuxMessage } from "@/common/types/message"; +import { createMuxMessage, type MuxMessage } from "@/common/types/message"; interface CompactionHandlerOptions { workspaceId: string; From 0ef46ab6033b17fd8263f6cfcf925219daf8af5e Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 14:04:47 +1100 Subject: [PATCH 10/13] =?UTF-8?q?=F0=9F=A4=96=20fix:=20use=20cumulative=20?= =?UTF-8?q?token=20usage=20for=20auto-compaction=20check?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previous implementation only checked last message tokens against context window, effectively disabling auto-compaction unless a single response exceeded 70%. Now uses WorkspaceUsageState.totalTokens for cumulative conversation tracking. - Remove lastUsage extraction and manual token calculation - Use pre-calculated totalTokens from WorkspaceStore - Simplifies code by ~20 lines - Auto-compaction now correctly triggers at 70% cumulative usage --- src/browser/utils/chatCommands.ts | 2 +- .../utils/compaction/autoCompactionCheck.ts | 24 +++---------------- 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/src/browser/utils/chatCommands.ts b/src/browser/utils/chatCommands.ts index 499d9aec1..e388f7894 100644 --- a/src/browser/utils/chatCommands.ts +++ b/src/browser/utils/chatCommands.ts @@ -21,7 +21,7 @@ import type { ParsedCommand } from "@/browser/utils/slashCommands/types"; import { applyCompactionOverrides } from "@/browser/utils/messages/compactionOptions"; import { resolveCompactionModel } from "@/browser/utils/messages/compactionModelPreference"; import { getRuntimeKey } from "@/common/constants/storage"; -import { ImageAttachment } from "../components/ImageAttachments"; +import type { ImageAttachment } from "../components/ImageAttachments"; // ============================================================================ // Workspace Creation diff --git a/src/browser/utils/compaction/autoCompactionCheck.ts b/src/browser/utils/compaction/autoCompactionCheck.ts index b5c84cb80..4369eadc4 100644 --- a/src/browser/utils/compaction/autoCompactionCheck.ts +++ b/src/browser/utils/compaction/autoCompactionCheck.ts @@ -40,7 +40,7 @@ const WARNING_ADVANCE_PERCENT = 10; * @param use1M - Whether 1M context is enabled * @param threshold - Usage percentage threshold (0.0-1.0, default 0.7 = 70%) * @param warningAdvancePercent - Show warning this many percentage points before threshold (default 10) - * @returns Check result with shouldAutoCompact flag, warning flag, and usage details + * @returns Check result with warning flag and usage percentage */ export function shouldAutoCompact( usage: WorkspaceUsageState | undefined, @@ -60,16 +60,6 @@ export function shouldAutoCompact( }; } - // Get last usage (most recent API response) - const lastUsage = usage.usageHistory[usage.usageHistory.length - 1]; - if (!lastUsage) { - return { - shouldShowWarning: false, - usagePercentage: 0, - thresholdPercentage, - }; - } - // Determine max tokens for this model const modelStats = getModelStats(model); const maxTokens = use1M && supports1MContext(model) ? 
1_000_000 : modelStats?.max_input_tokens; @@ -83,16 +73,8 @@ export function shouldAutoCompact( }; } - // Calculate total tokens used in last request - const totalUsed = - lastUsage.input.tokens + - lastUsage.cached.tokens + - lastUsage.cacheCreate.tokens + - lastUsage.output.tokens + - lastUsage.reasoning.tokens; - - // Calculate usage percentage - const usagePercentage = (totalUsed / maxTokens) * 100; + // Calculate usage percentage from cumulative conversation total + const usagePercentage = (usage.totalTokens / maxTokens) * 100; // Show warning if within advance window (e.g., 60% for 70% threshold with 10% advance) const shouldShowWarning = usagePercentage >= thresholdPercentage - warningAdvancePercent; From 833d4f745a514c7dc02965e46c56e6ec658f10f9 Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 14:26:13 +1100 Subject: [PATCH 11/13] =?UTF-8?q?=F0=9F=A4=96=20fix:=20correct=20totalToke?= =?UTF-8?q?ns=20to=20use=20last=20entry,=20not=20sum?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previous calculation summed all usageHistory entries, but each entry already contains cumulative prompt tokens (full context at that turn). This caused massive over-counting in multi-turn conversations. Example: - Turn 1: 1,000 tokens - Turn 2: 2,500 tokens (includes turn 1) - Turn 3: 4,200 tokens (includes turns 1-2) Before: 1,000 + 2,500 + 4,200 = 7,700 tokens (183% inflated) After: 4,200 tokens (correct - just use last entry) This fix ensures auto-compaction triggers at actual 70% usage instead of triggering far earlier due to double/triple counting. --- src/browser/stores/WorkspaceStore.ts | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/browser/stores/WorkspaceStore.ts b/src/browser/stores/WorkspaceStore.ts index 78c20189f..01ea6c11f 100644 --- a/src/browser/stores/WorkspaceStore.ts +++ b/src/browser/stores/WorkspaceStore.ts @@ -437,17 +437,17 @@ export class WorkspaceStore { const model = aggregator.getCurrentModel(); const usageHistory = cumUsageHistory(messages, model); - // Calculate total from usage history (now includes historical) - const totalTokens = usageHistory.reduce( - (sum, u) => - sum + - u.input.tokens + - u.cached.tokens + - u.cacheCreate.tokens + - u.output.tokens + - u.reasoning.tokens, - 0 - ); + // Use last entry's total (each entry is cumulative, not a delta) + // Each usageHistory entry contains the FULL prompt tokens for that turn, + // so we only need the most recent value, not a sum + const lastEntry = usageHistory[usageHistory.length - 1]; + const totalTokens = lastEntry + ? lastEntry.input.tokens + + lastEntry.cached.tokens + + lastEntry.cacheCreate.tokens + + lastEntry.output.tokens + + lastEntry.reasoning.tokens + : 0; return { usageHistory, totalTokens }; }); From 278ce7feca5c9c925e7c19b8157251a508405e6b Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 14:38:37 +1100 Subject: [PATCH 12/13] better messages --- src/browser/components/CompactionWarning.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/browser/components/CompactionWarning.tsx b/src/browser/components/CompactionWarning.tsx index 7170917d9..7688f1bad 100644 --- a/src/browser/components/CompactionWarning.tsx +++ b/src/browser/components/CompactionWarning.tsx @@ -25,7 +25,7 @@ export const CompactionWarning: React.FC<{ const remaining = props.thresholdPercentage - props.usagePercentage; const message = willCompactNext - ? "⚠️ Approaching context limit. 
Next message will trigger auto-compaction." + ? "⚠️ Context limit reached. Next message will trigger auto-compaction." : `Context left until Auto-Compact: ${Math.round(remaining)}%`; return ( From 52d11bd1debc9f41fb4c63983504b4569bf953a4 Mon Sep 17 00:00:00 2001 From: ethan Date: Thu, 20 Nov 2025 16:40:47 +1100 Subject: [PATCH 13/13] =?UTF-8?q?=F0=9F=A4=96=20feat:=20add=20auto-compact?= =?UTF-8?q?ion=20configuration=20UI?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add configurable auto-compaction threshold (50-90%) per workspace - Extract threshold constants to ui.ts (DRY) - Create reusable useClampedNumberInput hook for numeric inputs - Add settings to right sidebar with checkbox and percentage input - Wire settings through to shouldAutoCompact check - Use existing HelpIndicator pattern for tooltips --- src/browser/components/AIView.tsx | 13 +++- src/browser/components/Context1MCheckbox.tsx | 4 +- .../RightSidebar/AutoCompactionSettings.tsx | 65 +++++++++++++++++++ .../components/RightSidebar/CostsTab.tsx | 3 + .../hooks/useAutoCompactionSettings.ts | 40 ++++++++++++ src/browser/hooks/useClampedNumberInput.ts | 56 ++++++++++++++++ .../utils/compaction/autoCompactionCheck.ts | 27 ++++++-- src/common/constants/storage.ts | 18 +++++ src/common/constants/ui.ts | 17 +++++ 9 files changed, 231 insertions(+), 12 deletions(-) create mode 100644 src/browser/components/RightSidebar/AutoCompactionSettings.tsx create mode 100644 src/browser/hooks/useAutoCompactionSettings.ts create mode 100644 src/browser/hooks/useClampedNumberInput.ts diff --git a/src/browser/components/AIView.tsx b/src/browser/components/AIView.tsx index f37f14d7b..9578a57dc 100644 --- a/src/browser/components/AIView.tsx +++ b/src/browser/components/AIView.tsx @@ -35,6 +35,7 @@ import { QueuedMessage } from "./Messages/QueuedMessage"; import { CompactionWarning } from "./CompactionWarning"; import { shouldAutoCompact } from "@/browser/utils/compaction/autoCompactionCheck"; import { use1MContext } from "@/browser/hooks/use1MContext"; +import { useAutoCompactionSettings } from "@/browser/hooks/useAutoCompactionSettings"; interface AIViewProps { workspaceId: string; @@ -80,6 +81,8 @@ const AIViewInner: React.FC = ({ const aggregator = useWorkspaceAggregator(workspaceId); const workspaceUsage = useWorkspaceUsage(workspaceId); const [use1M] = use1MContext(); + const { enabled: autoCompactionEnabled, threshold: autoCompactionThreshold } = + useAutoCompactionSettings(workspaceId); const handledModelErrorsRef = useRef>(new Set()); useEffect(() => { @@ -320,9 +323,13 @@ const AIViewInner: React.FC = ({ // Get active stream message ID for token counting const activeStreamMessageId = aggregator.getActiveStreamMessageId(); - const autoCompactionCheck = currentModel - ? 
shouldAutoCompact(workspaceUsage, currentModel, use1M) - : { shouldShowWarning: false, usagePercentage: 0, thresholdPercentage: 70 }; + const autoCompactionCheck = shouldAutoCompact( + workspaceUsage, + currentModel, + use1M, + autoCompactionEnabled, + autoCompactionThreshold / 100 + ); // Show warning when: shouldShowWarning flag is true AND not currently compacting const shouldShowCompactionWarning = !isCompacting && autoCompactionCheck.shouldShowWarning; diff --git a/src/browser/components/Context1MCheckbox.tsx b/src/browser/components/Context1MCheckbox.tsx index 1ee5ee68f..8a2e7fe3c 100644 --- a/src/browser/components/Context1MCheckbox.tsx +++ b/src/browser/components/Context1MCheckbox.tsx @@ -1,7 +1,7 @@ import React from "react"; import { use1MContext } from "@/browser/hooks/use1MContext"; import { supports1MContext } from "@/common/utils/ai/models"; -import { TooltipWrapper, Tooltip } from "./Tooltip"; +import { TooltipWrapper, Tooltip, HelpIndicator } from "./Tooltip"; interface Context1MCheckboxProps { modelString: string; @@ -22,7 +22,7 @@ export const Context1MCheckbox: React.FC = ({ modelStrin 1M - ? + ? Enable 1M token context window (beta feature for Claude Sonnet 4/4.5) diff --git a/src/browser/components/RightSidebar/AutoCompactionSettings.tsx b/src/browser/components/RightSidebar/AutoCompactionSettings.tsx new file mode 100644 index 000000000..518c85ce4 --- /dev/null +++ b/src/browser/components/RightSidebar/AutoCompactionSettings.tsx @@ -0,0 +1,65 @@ +import React from "react"; +import { useAutoCompactionSettings } from "@/browser/hooks/useAutoCompactionSettings"; +import { useClampedNumberInput } from "@/browser/hooks/useClampedNumberInput"; +import { + AUTO_COMPACTION_THRESHOLD_MIN, + AUTO_COMPACTION_THRESHOLD_MAX, +} from "@/common/constants/ui"; +import { TooltipWrapper, Tooltip, HelpIndicator } from "../Tooltip"; + +interface AutoCompactionSettingsProps { + workspaceId: string; +} + +export const AutoCompactionSettings: React.FC = ({ workspaceId }) => { + const { enabled, setEnabled, threshold, setThreshold } = useAutoCompactionSettings(workspaceId); + const { localValue, handleChange, handleBlur } = useClampedNumberInput( + threshold, + setThreshold, + AUTO_COMPACTION_THRESHOLD_MIN, + AUTO_COMPACTION_THRESHOLD_MAX + ); + + return ( +
+
+ {/* Left side: checkbox + label + tooltip */} +
+ + + ? + + Automatically compact conversation history when context usage reaches the threshold + + +
+ + {/* Right side: input + % symbol */} +
+ + % +
+
+
+ ); +}; diff --git a/src/browser/components/RightSidebar/CostsTab.tsx b/src/browser/components/RightSidebar/CostsTab.tsx index 0705a8aff..3ab598ce1 100644 --- a/src/browser/components/RightSidebar/CostsTab.tsx +++ b/src/browser/components/RightSidebar/CostsTab.tsx @@ -8,6 +8,7 @@ import { use1MContext } from "@/browser/hooks/use1MContext"; import { supports1MContext } from "@/common/utils/ai/models"; import { TOKEN_COMPONENT_COLORS } from "@/common/utils/tokens/tokenMeterUtils"; import { ConsumerBreakdown } from "./ConsumerBreakdown"; +import { AutoCompactionSettings } from "./AutoCompactionSettings"; // Format token display - show k for thousands with 1 decimal const formatTokens = (tokens: number) => @@ -230,6 +231,8 @@ const CostsTabComponent: React.FC = ({ workspaceId }) => { )} + {hasUsageData && <AutoCompactionSettings workspaceId={workspaceId} />} + {hasUsageData && (
diff --git a/src/browser/hooks/useAutoCompactionSettings.ts b/src/browser/hooks/useAutoCompactionSettings.ts new file mode 100644 index 000000000..3a5b436ed --- /dev/null +++ b/src/browser/hooks/useAutoCompactionSettings.ts @@ -0,0 +1,40 @@ +import { usePersistedState } from "@/browser/hooks/usePersistedState"; +import { + getAutoCompactionEnabledKey, + getAutoCompactionThresholdKey, +} from "@/common/constants/storage"; +import { DEFAULT_AUTO_COMPACTION_THRESHOLD_PERCENT } from "@/common/constants/ui"; + +export interface AutoCompactionSettings { + /** Whether auto-compaction is enabled for this workspace */ + enabled: boolean; + /** Update enabled state */ + setEnabled: (value: boolean) => void; + /** Current threshold percentage (50-90) */ + threshold: number; + /** Update threshold percentage (will be clamped to 50-90 range by UI) */ + setThreshold: (value: number) => void; +} + +/** + * Custom hook for auto-compaction settings per workspace. + * Persists both enabled state and threshold percentage to localStorage. + * + * @param workspaceId - Workspace identifier + * @returns Settings object with getters and setters + */ +export function useAutoCompactionSettings(workspaceId: string): AutoCompactionSettings { + const [enabled, setEnabled] = usePersistedState( + getAutoCompactionEnabledKey(workspaceId), + true, + { listener: true } + ); + + const [threshold, setThreshold] = usePersistedState( + getAutoCompactionThresholdKey(workspaceId), + DEFAULT_AUTO_COMPACTION_THRESHOLD_PERCENT, + { listener: true } + ); + + return { enabled, setEnabled, threshold, setThreshold }; +} diff --git a/src/browser/hooks/useClampedNumberInput.ts b/src/browser/hooks/useClampedNumberInput.ts new file mode 100644 index 000000000..998ff0e97 --- /dev/null +++ b/src/browser/hooks/useClampedNumberInput.ts @@ -0,0 +1,56 @@ +import React from "react"; + +/** + * Hook for number input with local state, validation, and clamping on blur. + * Prevents typing interruption while ensuring valid persisted values. 
+ * + * @param persistedValue - Current value from persistence layer + * @param setPersisted - Function to update persisted value + * @param min - Minimum allowed value + * @param max - Maximum allowed value + * @returns Object with localValue, handleChange, and handleBlur + */ +export function useClampedNumberInput( + persistedValue: number, + setPersisted: (value: number) => void, + min: number, + max: number +) { + const [localValue, setLocalValue] = React.useState(persistedValue.toString()); + + // Sync local state when persisted value changes (e.g., from other tabs) + React.useEffect(() => { + setLocalValue(persistedValue.toString()); + }, [persistedValue]); + + const handleChange = (e: React.ChangeEvent) => { + const input = e.target.value; + // Allow empty or valid partial numbers (1-3 digits for typical use) + if (input === "" || /^\d{1,3}$/.test(input)) { + setLocalValue(input); + } + }; + + const handleBlur = () => { + const num = parseInt(localValue); + + if (localValue === "" || isNaN(num)) { + // Invalid input - revert to persisted value + setLocalValue(persistedValue.toString()); + } else if (num < min) { + // Below minimum - clamp to min + setPersisted(min); + setLocalValue(min.toString()); + } else if (num > max) { + // Above maximum - clamp to max + setPersisted(max); + setLocalValue(max.toString()); + } else { + // Valid - persist the value + setPersisted(num); + setLocalValue(num.toString()); + } + }; + + return { localValue, handleChange, handleBlur }; +} diff --git a/src/browser/utils/compaction/autoCompactionCheck.ts b/src/browser/utils/compaction/autoCompactionCheck.ts index 4369eadc4..69db9d5e0 100644 --- a/src/browser/utils/compaction/autoCompactionCheck.ts +++ b/src/browser/utils/compaction/autoCompactionCheck.ts @@ -18,17 +18,15 @@ import type { WorkspaceUsageState } from "@/browser/stores/WorkspaceStore"; import { getModelStats } from "@/common/utils/tokens/modelStats"; import { supports1MContext } from "@/common/utils/ai/models"; +import { DEFAULT_AUTO_COMPACTION_THRESHOLD } from "@/common/constants/ui"; export interface AutoCompactionCheckResult { shouldShowWarning: boolean; usagePercentage: number; thresholdPercentage: number; + enabled: boolean; } -// Auto-compaction threshold (0.7 = 70%) -// TODO: Make this configurable via settings -const AUTO_COMPACTION_THRESHOLD = 0.7; - // Show warning this many percentage points before threshold const WARNING_ADVANCE_PERCENT = 10; @@ -36,27 +34,40 @@ const WARNING_ADVANCE_PERCENT = 10; * Check if auto-compaction should trigger based on token usage * * @param usage - Current workspace usage state (from useWorkspaceUsage) - * @param model - Current model string + * @param model - Current model string (optional - returns safe default if not provided) * @param use1M - Whether 1M context is enabled + * @param enabled - Whether auto-compaction is enabled for this workspace * @param threshold - Usage percentage threshold (0.0-1.0, default 0.7 = 70%) * @param warningAdvancePercent - Show warning this many percentage points before threshold (default 10) * @returns Check result with warning flag and usage percentage */ export function shouldAutoCompact( usage: WorkspaceUsageState | undefined, - model: string, + model: string | null | undefined, use1M: boolean, - threshold: number = AUTO_COMPACTION_THRESHOLD, + enabled = true, + threshold: number = DEFAULT_AUTO_COMPACTION_THRESHOLD, warningAdvancePercent: number = WARNING_ADVANCE_PERCENT ): AutoCompactionCheckResult { const thresholdPercentage = threshold * 100; + // 
Short-circuit if auto-compaction is disabled + if (!enabled || !model) { + return { + shouldShowWarning: false, + usagePercentage: 0, + thresholdPercentage, + enabled: false, + }; + } + // No usage data yet - safe default (don't trigger on first message) if (!usage || usage.usageHistory.length === 0) { return { shouldShowWarning: false, usagePercentage: 0, thresholdPercentage, + enabled: true, }; } @@ -70,6 +81,7 @@ export function shouldAutoCompact( shouldShowWarning: false, usagePercentage: 0, thresholdPercentage, + enabled: true, }; } @@ -83,5 +95,6 @@ export function shouldAutoCompact( shouldShowWarning, usagePercentage, thresholdPercentage, + enabled: true, }; } diff --git a/src/common/constants/storage.ts b/src/common/constants/storage.ts index 6ce7477ec..a2bbc465e 100644 --- a/src/common/constants/storage.ts +++ b/src/common/constants/storage.ts @@ -153,6 +153,22 @@ export function getReviewSearchStateKey(workspaceId: string): string { return `reviewSearchState:${workspaceId}`; } +/** + * Get the localStorage key for auto-compaction enabled preference per workspace + * Format: "autoCompaction:enabled:{workspaceId}" + */ +export function getAutoCompactionEnabledKey(workspaceId: string): string { + return `autoCompaction:enabled:${workspaceId}`; +} + +/** + * Get the localStorage key for auto-compaction threshold percentage per workspace + * Format: "autoCompaction:threshold:{workspaceId}" + */ +export function getAutoCompactionThresholdKey(workspaceId: string): string { + return `autoCompaction:threshold:${workspaceId}`; +} + /** * List of workspace-scoped key functions that should be copied on fork and deleted on removal */ @@ -166,6 +182,8 @@ const PERSISTENT_WORKSPACE_KEY_FUNCTIONS: Array<(workspaceId: string) => string> getReviewExpandStateKey, getFileTreeExpandStateKey, getReviewSearchStateKey, + getAutoCompactionEnabledKey, + getAutoCompactionThresholdKey, ]; /** diff --git a/src/common/constants/ui.ts b/src/common/constants/ui.ts index d038b8fef..f4b7437a5 100644 --- a/src/common/constants/ui.ts +++ b/src/common/constants/ui.ts @@ -10,6 +10,23 @@ */ export const COMPACTED_EMOJI = "📦"; +/** + * Auto-compaction threshold bounds (percentage) + * Too low risks frequent interruptions; too high risks hitting context limits + */ +export const AUTO_COMPACTION_THRESHOLD_MIN = 50; +export const AUTO_COMPACTION_THRESHOLD_MAX = 90; + +/** + * Default auto-compaction threshold percentage (50-90 range) + * Applied when creating new workspaces + */ +export const DEFAULT_AUTO_COMPACTION_THRESHOLD_PERCENT = 70; + +/** + * Default threshold as decimal for calculations (0.7 = 70%) + */ +export const DEFAULT_AUTO_COMPACTION_THRESHOLD = DEFAULT_AUTO_COMPACTION_THRESHOLD_PERCENT / 100; /** * Duration (ms) to show "copied" feedback after copying to clipboard */
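
Note on the threshold logic introduced across these patches: a minimal TypeScript sketch of how the numbers combine, assuming the constants from ui.ts. The function name sketchAutoCompactionState and the 200k-token window in the example are illustrative only and do not appear in the diffs.

// Sketch only (not part of the patches): how cumulative usage, the warning banner,
// and the auto-compaction trigger relate. totalTokens mirrors
// WorkspaceUsageState.totalTokens (the last cumulative usage entry, not a sum).
function sketchAutoCompactionState(
  totalTokens: number,
  maxInputTokens: number,
  thresholdPercent: number = 70, // DEFAULT_AUTO_COMPACTION_THRESHOLD_PERCENT, user-configurable 50-90
  warningAdvancePercent: number = 10 // WARNING_ADVANCE_PERCENT
): { usagePercentage: number; showWarning: boolean; willCompactNext: boolean } {
  const usagePercentage = (totalTokens / maxInputTokens) * 100;
  return {
    usagePercentage,
    // CompactionWarning is shown this many points before the trigger
    showWarning: usagePercentage >= thresholdPercent - warningAdvancePercent,
    // ChatInput reroutes the next send through executeCompaction at/above the threshold
    willCompactNext: usagePercentage >= thresholdPercent,
  };
}

// Example: 130k tokens against a 200k window = 65% usage -> banner shows (>= 60),
// but the next message does not yet auto-compact (< 70).
console.log(sketchAutoCompactionState(130_000, 200_000));

The banner alone never interrupts the user; only the second condition reroutes a send through executeCompaction, and when auto-compaction is disabled the check reports 0% so neither fires.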