diff --git a/src/App.tsx b/src/App.tsx
index e9390ccdb5..cb54a5ebbf 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -197,7 +197,7 @@ function AppInner() {
   useResumeManager();
 
   // Handle auto-continue after compaction (when user uses /compact -c)
-  const { handleCompactStart } = useAutoCompactContinue();
+  useAutoCompactContinue();
 
   // Sync selectedWorkspace with URL hash
   useEffect(() => {
@@ -641,13 +641,11 @@ function AppInner() {
               workspaceInfo={`${selectedWorkspace.projectName}/${selectedWorkspace.workspacePath.split("/").pop() ?? ""}`}
             >
-                onCompactStart={(continueMessage) =>
-                  handleCompactStart(selectedWorkspace.workspaceId, continueMessage)
-                }
               />
             ) : (
diff --git a/src/components/AIView.tsx b/src/components/AIView.tsx
index 27c2dce17b..faf197604a 100644
--- a/src/components/AIView.tsx
+++ b/src/components/AIView.tsx
@@ -192,7 +192,6 @@ interface AIViewProps {
   projectName: string;
   branch: string;
   workspacePath: string;
-  onCompactStart?: (continueMessage: string | undefined) => void;
   className?: string;
 }
 
@@ -201,7 +200,6 @@ const AIViewInner: React.FC<AIViewProps> = ({
   projectName,
   branch,
   workspacePath,
-  onCompactStart,
   className,
 }) => {
   // NEW: Get workspace state from store (only re-renders when THIS workspace changes)
@@ -326,6 +324,23 @@ const AIViewInner: React.FC<AIViewProps> = ({
     handleOpenTerminal,
   });
 
+  // Clear editing state if the message being edited no longer exists
+  // Must be before early return to satisfy React Hooks rules
+  useEffect(() => {
+    if (!workspaceState || !editingMessage) return;
+
+    const mergedMessages = mergeConsecutiveStreamErrors(workspaceState.messages);
+    const editCutoffHistoryId = mergedMessages.find(
+      (msg): msg is Exclude<DisplayedMessage, { type: "history-hidden" }> =>
+        msg.type !== "history-hidden" && msg.historyId === editingMessage.id
+    )?.historyId;
+
+    if (!editCutoffHistoryId) {
+      // Message was replaced or deleted - clear editing state
+      setEditingMessage(undefined);
+    }
+  }, [workspaceState, editingMessage]);
+
   // Return early if workspace state not loaded yet
   if (!workspaceState) {
     return (
@@ -344,7 +359,6 @@ const AIViewInner: React.FC<AIViewProps> = ({
     workspaceState;
 
   // Get active stream message ID for token counting
-  // Use getActiveStreamMessageId() which returns the messageId directly
   const activeStreamMessageId = aggregator.getActiveStreamMessageId();
 
   // Track if last message was interrupted or errored (for RetryBarrier)
@@ -451,6 +465,7 @@ const AIViewInner: React.FC<AIViewProps> = ({
                       onEditUserMessage={handleEditUserMessage}
                       workspaceId={workspaceId}
                       model={currentModel ?? undefined}
+                      isCompacting={isCompacting}
                     />
                     {isAtCutoff && (
@@ -507,7 +522,6 @@ const AIViewInner: React.FC<AIViewProps> = ({
             onMessageSent={handleMessageSent}
             onTruncateHistory={handleClearHistory}
             onProviderConfig={handleProviderConfig}
-            onCompactStart={onCompactStart}
             disabled={!projectName || !branch}
             isCompacting={isCompacting}
             editingMessage={editingMessage}
diff --git a/src/components/ChatInput.tsx b/src/components/ChatInput.tsx
index e4c17b5c0e..b3a0005154 100644
--- a/src/components/ChatInput.tsx
+++ b/src/components/ChatInput.tsx
@@ -26,6 +26,8 @@
 import { VimTextArea } from "./VimTextArea";
 import { ImageAttachments, type ImageAttachment } from "./ImageAttachments";
 import type { ThinkingLevel } from "@/types/thinking";
+import type { CmuxFrontendMetadata } from "@/types/message";
+import type { SendMessageOptions } from "@/types/ipc";
 
 const InputSection = styled.div`
   position: relative;
@@ -122,7 +124,6 @@ export interface ChatInputProps {
   onTruncateHistory: (percentage?: number) => Promise<void>;
   onProviderConfig?: (provider: string, keyPath: string[], value: string) => Promise<void>;
   onModelChange?: (model: string) => void;
-  onCompactStart?: (continueMessage: string | undefined) => void; // Called when compaction starts to update continue message state
   disabled?: boolean;
   isCompacting?: boolean;
   editingMessage?: { id: string; content: string };
@@ -282,13 +283,53 @@ const createErrorToast = (error: SendMessageErrorType): Toast => {
   }
 };
 
+/**
+ * Prepare compaction message from /compact command
+ * Returns the actual message text (summarization request), metadata, and options
+ */
+function prepareCompactionMessage(
+  command: string,
+  sendMessageOptions: SendMessageOptions
+): {
+  messageText: string;
+  metadata: CmuxFrontendMetadata;
+  options: Partial<SendMessageOptions>;
+} {
+  const parsed = parseCommand(command);
+  if (parsed?.type !== "compact") {
+    throw new Error("Not a compact command");
+  }
+
+  const targetWords = parsed.maxOutputTokens ? Math.round(parsed.maxOutputTokens / 1.3) : 2000;
+
+  const messageText = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. Use approximately ${targetWords} words.`;
+
+  const metadata: CmuxFrontendMetadata = {
+    type: "compaction-request",
+    rawCommand: command,
+    parsed: {
+      maxOutputTokens: parsed.maxOutputTokens,
+      continueMessage: parsed.continueMessage,
+    },
+  };
+
+  const isAnthropic = sendMessageOptions.model.startsWith("anthropic:");
+  const options: Partial<SendMessageOptions> = {
+    thinkingLevel: isAnthropic ? "off" : sendMessageOptions.thinkingLevel,
+    toolPolicy: [{ regex_match: "compact_summary", action: "require" }],
+    maxOutputTokens: parsed.maxOutputTokens,
+    mode: "compact" as const,
+  };
+
+  return { messageText, metadata, options };
+}
+
 export const ChatInput: React.FC<ChatInputProps> = ({
   workspaceId,
   onMessageSent,
   onTruncateHistory,
   onProviderConfig,
   onModelChange,
-  onCompactStart,
   disabled = false,
   isCompacting = false,
   editingMessage,
@@ -296,7 +337,7 @@ export const ChatInput: React.FC<ChatInputProps> = ({
   canInterrupt = false,
   onReady,
 }) => {
-  const [input, setInput] = usePersistedState(getInputKey(workspaceId), "");
+  const [input, setInput] = usePersistedState(getInputKey(workspaceId), "", { listener: true });
   const [isSending, setIsSending] = useState(false);
   const [showCommandSuggestions, setShowCommandSuggestions] = useState(false);
   const [commandSuggestions, setCommandSuggestions] = useState([]);
@@ -524,8 +565,9 @@ export const ChatInput: React.FC<ChatInputProps> = ({
 
   const handleSend = async () => {
     // Allow sending if there's text or images
-    if ((!input.trim() && imageAttachments.length === 0) || disabled || isSending || isCompacting)
+    if ((!input.trim() && imageAttachments.length === 0) || disabled || isSending || isCompacting) {
       return;
+    }
 
     const messageText = input.trim();
@@ -610,22 +652,17 @@ export const ChatInput: React.FC<ChatInputProps> = ({
       setIsSending(true);
 
       try {
-        // Construct message asking for summarization
-        const targetWords = parsed.maxOutputTokens
-          ? Math.round(parsed.maxOutputTokens / 1.3)
-          : 2000;
-        const compactionMessage = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. Use approximately ${targetWords} words.`;
-
-        // Send message with compact_summary tool required and maxOutputTokens in options
-        // Note: Anthropic doesn't support extended thinking with required tool_choice,
-        // so disable thinking for Anthropic models during compaction
-        const isAnthropic = sendMessageOptions.model.startsWith("anthropic:");
+        const {
+          messageText: compactionMessage,
+          metadata,
+          options,
+        } = prepareCompactionMessage(messageText, sendMessageOptions);
+
         const result = await window.api.workspace.sendMessage(workspaceId, compactionMessage, {
           ...sendMessageOptions,
-          thinkingLevel: isAnthropic ? "off" : sendMessageOptions.thinkingLevel,
-          toolPolicy: [{ regex_match: "compact_summary", action: "require" }],
-          maxOutputTokens: parsed.maxOutputTokens, // Pass to model directly
-          mode: "compact" as const, // Allow users to customize compaction behavior via Mode: compact in AGENTS.md
+          ...options,
+          cmuxMetadata: metadata,
+          editMessageId: editingMessage?.id, // Support editing compaction messages
         });
 
         if (!result.success) {
@@ -633,18 +670,18 @@ export const ChatInput: React.FC<ChatInputProps> = ({
           setToast(createErrorToast(result.error));
           setInput(messageText); // Restore input on error
         } else {
-          // Notify parent to update continue message state (parent handles storage)
-          if (onCompactStart) {
-            onCompactStart(parsed.continueMessage);
-          }
-
           setToast({
             id: Date.now().toString(),
             type: "success",
-            message: parsed.continueMessage
-              ? "Compaction started. Will continue automatically after completion."
-              : "Compaction started. AI will summarize the conversation.",
+            message:
+              metadata.type === "compaction-request" && metadata.parsed.continueMessage
+                ? "Compaction started. Will continue automatically after completion."
+                : "Compaction started. AI will summarize the conversation.",
           });
+          // Clear editing state on success
+          if (editingMessage && onCancelEdit) {
+            onCancelEdit();
+          }
         }
       } catch (error) {
         console.error("Compaction error:", error);
@@ -678,10 +715,31 @@ export const ChatInput: React.FC<ChatInputProps> = ({
         mimeType: img.mimeType,
       }));
 
-      const result = await window.api.workspace.sendMessage(workspaceId, messageText, {
+      // When editing a /compact command, regenerate the actual summarization request
+      let actualMessageText = messageText;
+      let cmuxMetadata: CmuxFrontendMetadata | undefined;
+      let compactionOptions = {};
+
+      if (editingMessage && messageText.startsWith("/")) {
+        const parsed = parseCommand(messageText);
+        if (parsed?.type === "compact") {
+          const {
+            messageText: regeneratedText,
+            metadata,
+            options,
+          } = prepareCompactionMessage(messageText, sendMessageOptions);
+          actualMessageText = regeneratedText;
+          cmuxMetadata = metadata;
+          compactionOptions = options;
+        }
+      }
+
+      const result = await window.api.workspace.sendMessage(workspaceId, actualMessageText, {
         ...sendMessageOptions,
+        ...compactionOptions,
         editMessageId: editingMessage?.id,
         imageParts: imageParts.length > 0 ? imageParts : undefined,
+        cmuxMetadata,
       });
 
       if (!result.success) {
@@ -782,7 +840,7 @@ export const ChatInput: React.FC<ChatInputProps> = ({
       return `Edit your message... (${formatKeybind(KEYBINDS.CANCEL)} to cancel edit, ${formatKeybind(KEYBINDS.SEND_MESSAGE)} to send)`;
     }
     if (isCompacting) {
-      return "Compacting conversation...";
+      return `Compacting... (${formatKeybind(KEYBINDS.INTERRUPT_STREAM)} to cancel)`;
     }
 
     // Build hints for normal input
@@ -818,7 +876,7 @@ export const ChatInput: React.FC<ChatInputProps> = ({
         onPaste={handlePaste}
         suppressKeys={showCommandSuggestions ? COMMAND_SUGGESTION_KEYS : undefined}
         placeholder={placeholder}
-        disabled={disabled || isSending || isCompacting}
+        disabled={!editingMessage && (disabled || isSending || isCompacting)}
         aria-label={editingMessage ? "Edit your last message" : "Message Claude"}
         aria-autocomplete="list"
         aria-controls={
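Reviewer note: a minimal sketch of how the new `prepareCompactionMessage` helper is consumed inside `handleSend` above. It mirrors this diff rather than adding behavior; `workspaceId`, `sendMessageOptions`, and `editingMessage` are assumed to be in scope as they are in the component, and the `/compact` string is a hypothetical input.

```ts
// Sketch only - mirrors the handleSend changes above, not a new API.
const { messageText, metadata, options } = prepareCompactionMessage(
  "/compact -c keep going", // hypothetical raw command as typed by the user
  sendMessageOptions
);

await window.api.workspace.sendMessage(workspaceId, messageText, {
  ...sendMessageOptions,
  ...options, // toolPolicy requiring compact_summary, maxOutputTokens, mode: "compact"
  cmuxMetadata: metadata, // compaction-request metadata, stored black-box by the backend
  editMessageId: editingMessage?.id, // only set when editing an existing /compact message
});
```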
"Edit your last message" : "Message Claude"} aria-autocomplete="list" aria-controls={ diff --git a/src/components/Messages/MessageRenderer.tsx b/src/components/Messages/MessageRenderer.tsx index 8084890890..29df029918 100644 --- a/src/components/Messages/MessageRenderer.tsx +++ b/src/components/Messages/MessageRenderer.tsx @@ -13,15 +13,23 @@ interface MessageRendererProps { onEditUserMessage?: (messageId: string, content: string) => void; workspaceId?: string; model?: string; + isCompacting?: boolean; } // Memoized to prevent unnecessary re-renders when parent (AIView) updates export const MessageRenderer = React.memo( - ({ message, className, onEditUserMessage, workspaceId, model }) => { + ({ message, className, onEditUserMessage, workspaceId, model, isCompacting }) => { // Route based on message type switch (message.type) { case "user": - return ; + return ( + + ); case "assistant": return ( diff --git a/src/components/Messages/UserMessage.tsx b/src/components/Messages/UserMessage.tsx index 1b074ddc24..685f31651f 100644 --- a/src/components/Messages/UserMessage.tsx +++ b/src/components/Messages/UserMessage.tsx @@ -4,6 +4,7 @@ import type { DisplayedMessage } from "@/types/message"; import type { ButtonConfig } from "./MessageWindow"; import { MessageWindow } from "./MessageWindow"; import { TerminalOutput } from "./TerminalOutput"; +import { formatKeybind, KEYBINDS } from "@/utils/ui/keybinds"; const FormattedContent = styled.pre` margin: 0; @@ -34,9 +35,15 @@ interface UserMessageProps { message: DisplayedMessage & { type: "user" }; className?: string; onEdit?: (messageId: string, content: string) => void; + isCompacting?: boolean; } -export const UserMessage: React.FC = ({ message, className, onEdit }) => { +export const UserMessage: React.FC = ({ + message, + className, + onEdit, + isCompacting, +}) => { const [copied, setCopied] = useState(false); const content = message.content; @@ -72,6 +79,10 @@ export const UserMessage: React.FC = ({ message, className, on { label: "Edit", onClick: handleEdit, + disabled: isCompacting, + tooltip: isCompacting + ? `Cannot edit while compacting (press ${formatKeybind(KEYBINDS.INTERRUPT_STREAM)} to cancel)` + : undefined, }, ] : []), diff --git a/src/hooks/useAutoCompactContinue.ts b/src/hooks/useAutoCompactContinue.ts index 3abb0e3478..2b98fe190e 100644 --- a/src/hooks/useAutoCompactContinue.ts +++ b/src/hooks/useAutoCompactContinue.ts @@ -1,17 +1,22 @@ import { useRef, useEffect } from "react"; import { useWorkspaceStoreRaw, type WorkspaceState } from "@/stores/WorkspaceStore"; -import { getCompactContinueMessageKey } from "@/constants/storage"; import { buildSendMessageOptions } from "@/hooks/useSendMessageOptions"; /** - * Hook to manage auto-continue after compaction + * Hook to manage auto-continue after compaction using structured message metadata * - * Stateless reactive approach: - * - Watches all workspaces for single compacted message - * - Builds sendMessage options from localStorage + * Approach: + * - Watches all workspaces for single compacted message (compaction just completed) + * - Reads continueMessage from the summary message's compaction-result metadata * - Sends continue message automatically * + * Why summary metadata? When compaction completes, history is replaced with just the + * summary message. The original compaction-request message is deleted. To preserve + * the continueMessage across this replacement, we extract it before replacement and + * store it in the summary's metadata. 
+ * * Self-contained: No callback needed. Hook detects condition and handles action. + * No localStorage - metadata is the single source of truth. * * IMPORTANT: sendMessage options (model, thinking level, mode, etc.) are managed by the * frontend via buildSendMessageOptions. The backend does NOT fall back to workspace @@ -51,22 +56,24 @@ export function useAutoCompactContinue() { // Only proceed once per compaction completion if (firedForWorkspace.current.has(workspaceId)) continue; - const continueMessage = localStorage.getItem(getCompactContinueMessageKey(workspaceId)); + // After compaction, history is replaced with a single summary message + // The summary message has compaction-result metadata with the continueMessage + const summaryMessage = state.cmuxMessages[0]; // Single compacted message + const cmuxMeta = summaryMessage?.metadata?.cmuxMetadata; - if (continueMessage) { + if (cmuxMeta?.type === "compaction-result" && cmuxMeta.continueMessage) { // Mark as fired immediately to avoid re-entry on rapid renders firedForWorkspace.current.add(workspaceId); - // Clean up first to prevent duplicate sends (source of truth becomes history) - localStorage.removeItem(getCompactContinueMessageKey(workspaceId)); - // Build options and send message directly const options = buildSendMessageOptions(workspaceId); - window.api.workspace.sendMessage(workspaceId, continueMessage, options).catch((error) => { - console.error("Failed to send continue message:", error); - // If sending failed, allow another attempt on next render by clearing the guard - firedForWorkspace.current.delete(workspaceId); - }); + window.api.workspace + .sendMessage(workspaceId, cmuxMeta.continueMessage, options) + .catch((error) => { + console.error("Failed to send continue message:", error); + // If sending failed, allow another attempt on next render by clearing the guard + firedForWorkspace.current.delete(workspaceId); + }); } } }; @@ -83,18 +90,4 @@ export function useAutoCompactContinue() { return unsubscribe; }, [store]); // eslint-disable-line react-hooks/exhaustive-deps - - // Simple callback to store continue message in localStorage - // Called by ChatInput when /compact is parsed - const handleCompactStart = (workspaceId: string, continueMessage: string | undefined) => { - if (continueMessage) { - localStorage.setItem(getCompactContinueMessageKey(workspaceId), continueMessage); - } else { - // Clear any pending continue message if -c flag not provided - // Ensures stored message reflects latest user intent - localStorage.removeItem(getCompactContinueMessageKey(workspaceId)); - } - }; - - return { handleCompactStart }; } diff --git a/src/services/agentSession.ts b/src/services/agentSession.ts index aef1608068..670eaf01b9 100644 --- a/src/services/agentSession.ts +++ b/src/services/agentSession.ts @@ -233,6 +233,7 @@ export class AgentSession { { timestamp: Date.now(), toolPolicy: options?.toolPolicy, + cmuxMetadata: options?.cmuxMetadata, // Pass through frontend metadata as black-box }, additionalParts ); diff --git a/src/stores/WorkspaceStore.test.ts b/src/stores/WorkspaceStore.test.ts index 0c3be33ec8..fbd52e3383 100644 --- a/src/stores/WorkspaceStore.test.ts +++ b/src/stores/WorkspaceStore.test.ts @@ -184,6 +184,12 @@ describe("WorkspaceStore", () => { messageId?: string; model?: string; }>(); + + // Mark workspace as caught-up first (required for stream events to process) + onChatCallback({ + type: "caught-up", + }); + onChatCallback({ type: "stream-start", messageId: "msg-1", @@ -268,9 +274,15 @@ 
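Reviewer note: the auto-continue decision now hinges entirely on the summary message's metadata. A simplified, self-contained sketch of that detection, assuming the `CmuxFrontendMetadata` union added later in this diff (src/types/message.ts); `MinimalMessage` and `getContinueMessage` are stand-ins for illustration, and the real hook additionally guards against firing twice per workspace.

```ts
import type { CmuxFrontendMetadata } from "@/types/message";

interface MinimalMessage {
  metadata?: { cmuxMetadata?: CmuxFrontendMetadata };
}

// Returns the message to auto-send, or undefined when the workspace was not just compacted.
function getContinueMessage(cmuxMessages: MinimalMessage[]): string | undefined {
  // After compaction the history is exactly one summary message.
  if (cmuxMessages.length !== 1) return undefined;
  const meta = cmuxMessages[0]?.metadata?.cmuxMetadata;
  return meta?.type === "compaction-result" ? meta.continueMessage : undefined;
}
```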
describe("WorkspaceStore", () => { // Trigger change const onChatCallback = getOnChatCallback<{ type: string; - messageId: string; - model: string; + messageId?: string; + model?: string; }>(); + + // Mark workspace as caught-up first + onChatCallback({ + type: "caught-up", + }); + onChatCallback({ type: "stream-start", messageId: "msg1", @@ -298,9 +310,15 @@ describe("WorkspaceStore", () => { // Trigger change const onChatCallback = getOnChatCallback<{ type: string; - messageId: string; - model: string; + messageId?: string; + model?: string; }>(); + + // Mark workspace as caught-up first + onChatCallback({ + type: "caught-up", + }); + onChatCallback({ type: "stream-start", messageId: "msg1", @@ -379,10 +397,14 @@ describe("WorkspaceStore", () => { // but if a message was already queued, it should handle gracefully const onChatCallbackTyped = onChatCallback as (data: { type: string; - messageId: string; - model: string; + messageId?: string; + model?: string; }) => void; expect(() => { + // Mark as caught-up first + onChatCallbackTyped({ + type: "caught-up", + }); onChatCallbackTyped({ type: "stream-start", messageId: "msg1", diff --git a/src/stores/WorkspaceStore.ts b/src/stores/WorkspaceStore.ts index e5c94c6007..43349f6d1f 100644 --- a/src/stores/WorkspaceStore.ts +++ b/src/stores/WorkspaceStore.ts @@ -80,6 +80,7 @@ export class WorkspaceStore { private ipcUnsubscribers = new Map void>(); private caughtUp = new Map(); private historicalMessages = new Map(); + private pendingStreamEvents = new Map(); // Cache of last known recency per workspace (for change detection) private recencyCache = new Map(); @@ -302,6 +303,7 @@ export class WorkspaceStore { this.aggregators.delete(workspaceId); this.caughtUp.delete(workspaceId); this.historicalMessages.delete(workspaceId); + this.pendingStreamEvents.delete(workspaceId); this.recencyCache.delete(workspaceId); this.previousSidebarValues.delete(workspaceId); this.sidebarStateCache.delete(workspaceId); @@ -342,6 +344,7 @@ export class WorkspaceStore { this.aggregators.clear(); this.caughtUp.clear(); this.historicalMessages.clear(); + this.pendingStreamEvents.clear(); } // Private methods @@ -353,22 +356,63 @@ export class WorkspaceStore { return this.aggregators.get(workspaceId)!; } + private isStreamEvent(data: WorkspaceChatMessage): boolean { + return ( + isStreamStart(data) || + isStreamDelta(data) || + isStreamEnd(data) || + isStreamAbort(data) || + isToolCallStart(data) || + isToolCallDelta(data) || + isToolCallEnd(data) || + isReasoningDelta(data) || + isReasoningEnd(data) + ); + } + private handleChatMessage(workspaceId: string, data: WorkspaceChatMessage): void { const aggregator = this.getOrCreateAggregator(workspaceId); const isCaughtUp = this.caughtUp.get(workspaceId) ?? false; const historicalMsgs = this.historicalMessages.get(workspaceId) ?? []; if (isCaughtUpMessage(data)) { + // Load historical messages first if (historicalMsgs.length > 0) { aggregator.loadHistoricalMessages(historicalMsgs); this.historicalMessages.set(workspaceId, []); } + + // Process buffered stream events now that history is loaded + const pendingEvents = this.pendingStreamEvents.get(workspaceId) ?? 
[]; + for (const event of pendingEvents) { + this.processStreamEvent(workspaceId, aggregator, event); + } + this.pendingStreamEvents.set(workspaceId, []); + + // Mark as caught up this.caughtUp.set(workspaceId, true); this.states.bump(workspaceId); this.checkAndBumpRecencyIfChanged(); // Messages loaded, update recency return; } + // Buffer stream events until caught up (so they have full historical context) + if (!isCaughtUp && this.isStreamEvent(data)) { + const pending = this.pendingStreamEvents.get(workspaceId) ?? []; + pending.push(data); + this.pendingStreamEvents.set(workspaceId, pending); + return; + } + + // Process event immediately (already caught up or not a stream event) + this.processStreamEvent(workspaceId, aggregator, data); + } + + private processStreamEvent( + workspaceId: string, + aggregator: StreamingMessageAggregator, + data: WorkspaceChatMessage + ): void { if (isStreamError(data)) { aggregator.handleStreamError(data); this.states.bump(workspaceId); @@ -418,6 +462,20 @@ export class WorkspaceStore { if (part.type === "dynamic-tool" && part.toolName === "compact_summary") { const output = part.output as { summary?: string } | undefined; if (output?.summary) { + // Extract continueMessage from compaction-request before history gets replaced + const messages = aggregator.getAllMessages(); + const compactRequestMsg = [...messages] + .reverse() + .find( + (m) => + m.role === "user" && m.metadata?.cmuxMetadata?.type === "compaction-request" + ); + const cmuxMeta = compactRequestMsg?.metadata?.cmuxMetadata; + const continueMessage = + cmuxMeta?.type === "compaction-request" + ? cmuxMeta.parsed.continueMessage + : undefined; + const summaryMessage = createCmuxMessage( `summary-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`, "assistant", @@ -430,6 +488,10 @@ export class WorkspaceStore { providerMetadata: data.metadata.providerMetadata, duration: data.metadata.duration, systemMessageTokens: data.metadata.systemMessageTokens, + // Store continueMessage in summary so it survives history replacement + cmuxMetadata: continueMessage + ? { type: "compaction-result", continueMessage } + : { type: "normal" }, } ); @@ -496,9 +558,11 @@ export class WorkspaceStore { return; } - // Regular messages + // Regular messages (CmuxMessage without type field) + const isCaughtUp = this.caughtUp.get(workspaceId) ?? false; if (!isCaughtUp) { if ("role" in data && !("type" in data)) { + const historicalMsgs = this.historicalMessages.get(workspaceId) ?? 
[]; historicalMsgs.push(data); this.historicalMessages.set(workspaceId, historicalMsgs); } diff --git a/src/types/ipc.ts b/src/types/ipc.ts index 7ffd124757..9bba038296 100644 --- a/src/types/ipc.ts +++ b/src/types/ipc.ts @@ -1,6 +1,6 @@ import type { Result } from "./result"; import type { WorkspaceMetadata } from "./workspace"; -import type { CmuxMessage } from "./message"; +import type { CmuxMessage, CmuxFrontendMetadata } from "./message"; import type { ProjectConfig } from "@/config"; import type { SendMessageError, StreamErrorType } from "./errors"; import type { ThinkingLevel } from "./thinking"; @@ -141,6 +141,7 @@ export interface SendMessageOptions { maxOutputTokens?: number; providerOptions?: CmuxProviderOptions; mode?: string; // Mode name - frontend narrows to specific values, backend accepts any string + cmuxMetadata?: CmuxFrontendMetadata; // Frontend-defined metadata, backend treats as black-box } // API method signatures (shared between main and preload) diff --git a/src/types/message.ts b/src/types/message.ts index 9af3b7d5ac..24cff7a1f8 100644 --- a/src/types/message.ts +++ b/src/types/message.ts @@ -3,6 +3,25 @@ import type { LanguageModelV2Usage } from "@ai-sdk/provider"; import type { StreamErrorType } from "./errors"; import type { ToolPolicy } from "@/utils/tools/toolPolicy"; +// Frontend-specific metadata stored in cmuxMetadata field +// Backend stores this as-is without interpretation (black-box) +export type CmuxFrontendMetadata = + | { + type: "compaction-request"; + rawCommand: string; // The original /compact command as typed by user (for display) + parsed: { + maxOutputTokens?: number; + continueMessage?: string; + }; + } + | { + type: "compaction-result"; + continueMessage: string; // Message to send after compaction completes + } + | { + type: "normal"; // Regular messages + }; + // Our custom metadata type export interface CmuxMetadata { historySequence?: number; // Assigned by backend for global message ordering (required when writing to history) @@ -19,6 +38,7 @@ export interface CmuxMetadata { compacted?: boolean; // Whether this message is a compacted summary of previous history toolPolicy?: ToolPolicy; // Tool policy active when this message was sent (user messages only) mode?: string; // The mode (plan/exec/etc) active when this message was sent (assistant messages only) + cmuxMetadata?: CmuxFrontendMetadata; // Frontend-defined metadata, backend treats as black-box } // Extended tool part type that supports interrupted tool calls (input-available state) @@ -71,6 +91,14 @@ export type DisplayedMessage = imageParts?: Array<{ image: string; mimeType?: string }>; // Optional image attachments historySequence: number; // Global ordering across all messages timestamp?: number; + compactionRequest?: { + // Present if this is a /compact command + rawCommand: string; + parsed: { + maxOutputTokens?: number; + continueMessage?: string; + }; + }; } | { type: "assistant"; diff --git a/src/utils/messages/StreamingMessageAggregator.ts b/src/utils/messages/StreamingMessageAggregator.ts index 280917430b..4a084c422d 100644 --- a/src/utils/messages/StreamingMessageAggregator.ts +++ b/src/utils/messages/StreamingMessageAggregator.ts @@ -172,15 +172,10 @@ export class StreamingMessageAggregator { // Unified event handlers that encapsulate all complex logic handleStreamStart(data: StreamStartEvent): void { - // Detect if this stream is compacting by checking last user message's toolPolicy + // Detect if this stream is compacting by checking if last user message is a 
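Reviewer note: because `CmuxFrontendMetadata` is a discriminated union on `type`, consumers get compile-time narrowing. A self-contained sketch (`describeMetadata` is hypothetical, not part of this PR); the `never` assignment makes TypeScript flag any variant added later without updating the switch.

```ts
import type { CmuxFrontendMetadata } from "@/types/message";

function describeMetadata(meta: CmuxFrontendMetadata): string {
  switch (meta.type) {
    case "compaction-request":
      return `compaction requested via "${meta.rawCommand}"`;
    case "compaction-result":
      return `summary ready; auto-continue with "${meta.continueMessage}"`;
    case "normal":
      return "regular message";
    default: {
      // Exhaustiveness check: compile error if a new variant is added.
      const exhaustive: never = meta;
      return exhaustive;
    }
  }
}
```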
diff --git a/src/utils/messages/StreamingMessageAggregator.ts b/src/utils/messages/StreamingMessageAggregator.ts
index 280917430b..4a084c422d 100644
--- a/src/utils/messages/StreamingMessageAggregator.ts
+++ b/src/utils/messages/StreamingMessageAggregator.ts
@@ -172,15 +172,10 @@ export class StreamingMessageAggregator {
 
   // Unified event handlers that encapsulate all complex logic
   handleStreamStart(data: StreamStartEvent): void {
-    // Detect if this stream is compacting by checking last user message's toolPolicy
+    // Detect if this stream is compacting by checking if last user message is a compaction-request
     const messages = this.getAllMessages();
     const lastUserMsg = [...messages].reverse().find((m) => m.role === "user");
-    const isCompacting =
-      lastUserMsg?.metadata?.toolPolicy?.some(
-        (filter) =>
-          filter.action === "require" &&
-          new RegExp(`^${filter.regex_match}$`).test("compact_summary")
-      ) ?? false;
+    const isCompacting = lastUserMsg?.metadata?.cmuxMetadata?.type === "compaction-request";
 
     const context: StreamingContext = {
       startTime: Date.now(),
@@ -475,14 +470,25 @@
           mimeType: p.mimeType,
         }));
 
+        // Check if this is a compaction request message
+        const cmuxMeta = message.metadata?.cmuxMetadata;
+        const compactionRequest =
+          cmuxMeta?.type === "compaction-request"
+            ? {
+                rawCommand: cmuxMeta.rawCommand,
+                parsed: cmuxMeta.parsed,
+              }
+            : undefined;
+
         displayedMessages.push({
           type: "user",
           id: message.id,
           historyId: message.id,
-          content,
+          content: compactionRequest ? compactionRequest.rawCommand : content,
           imageParts: imageParts.length > 0 ? imageParts : undefined,
           historySequence,
           timestamp: baseTimestamp,
+          compactionRequest,
         });
       } else if (message.role === "assistant") {
         // Assistant messages: each part becomes a separate DisplayedMessage
diff --git a/tests/ipcMain/sendMessage.test.ts b/tests/ipcMain/sendMessage.test.ts
index aafa011e22..8e9a56580b 100644
--- a/tests/ipcMain/sendMessage.test.ts
+++ b/tests/ipcMain/sendMessage.test.ts
@@ -1377,4 +1377,70 @@ These are general instructions that apply to all modes.
       90000
     );
   });
+
+  // Test frontend metadata round-trip (no provider needed - just verifies storage)
+  test.concurrent(
+    "should preserve arbitrary frontend metadata through IPC round-trip",
+    async () => {
+      const { env, workspaceId, cleanup } = await setupWorkspaceWithoutProvider();
+      try {
+        // Create structured metadata
+        const testMetadata = {
+          type: "compaction-request" as const,
+          rawCommand: "/compact -c continue working",
+          parsed: {
+            maxOutputTokens: 5000,
+            continueMessage: "continue working",
+          },
+        };
+
+        // Send a message with frontend metadata
+        // Use invalid model to fail fast - we only care about metadata storage
+        const result = await env.mockIpcRenderer.invoke(
+          IPC_CHANNELS.WORKSPACE_SEND_MESSAGE,
+          workspaceId,
+          "Test message with metadata",
+          {
+            model: "openai:gpt-4", // Valid format but provider not configured - will fail after storing message
+            cmuxMetadata: testMetadata,
+          }
+        );
+
+        // Note: IPC call will fail due to missing provider config, but that's okay
+        // We only care that the user message was written to history with metadata
+        // (sendMessage writes user message before attempting to stream)
+
+        // Use event collector to get messages sent to frontend
+        const collector = createEventCollector(env.sentEvents, workspaceId);
+
+        // Wait for the user message to appear in the chat channel
+        await waitFor(() => {
+          const messages = collector.collect();
+          return messages.some((m) => "role" in m && m.role === "user");
+        }, 2000);
+
+        // Get all messages for this workspace
+        const allMessages = collector.collect();
+
+        // Find the user message we just sent
+        const userMessage = allMessages.find((msg) => "role" in msg && msg.role === "user");
+        expect(userMessage).toBeDefined();
+
+        // Verify metadata was preserved exactly as sent (black-box)
+        expect(userMessage).toHaveProperty("metadata");
+        const metadata = (userMessage as any).metadata;
+        expect(metadata).toHaveProperty("cmuxMetadata");
+        expect(metadata.cmuxMetadata).toEqual(testMetadata);
+
+        // Verify structured fields are accessible
+        expect(metadata.cmuxMetadata.type).toBe("compaction-request");
+        expect(metadata.cmuxMetadata.rawCommand).toBe("/compact -c continue working");
+        expect(metadata.cmuxMetadata.parsed.continueMessage).toBe("continue working");
+        expect(metadata.cmuxMetadata.parsed.maxOutputTokens).toBe(5000);
+      } finally {
+        await cleanup();
+      }
+    },
+    5000
+  );
 });
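Closing reviewer note: the WorkspaceStore change buffers stream events until the caught-up marker arrives so they are applied on top of fully loaded history. A self-contained sketch of that ordering pattern with generic stand-in types (simplified: the real store only buffers stream events and routes historical messages separately).

```ts
// Minimal illustration of the buffer-until-caught-up ordering used in WorkspaceStore.
class CatchUpBuffer<E> {
  private caughtUp = false;
  private pending: E[] = [];

  constructor(
    private readonly isCaughtUpMarker: (event: E) => boolean,
    private readonly process: (event: E) => void
  ) {}

  push(event: E): void {
    if (this.isCaughtUpMarker(event)) {
      // History is loaded: flush everything that arrived early, in order.
      for (const buffered of this.pending) this.process(buffered);
      this.pending = [];
      this.caughtUp = true;
      return;
    }
    if (!this.caughtUp) {
      this.pending.push(event); // hold events until full history is available
      return;
    }
    this.process(event);
  }
}

// Usage sketch: events received before "caught-up" are replayed after it.
const buffer = new CatchUpBuffer<string>(
  (e) => e === "caught-up",
  (e) => console.log("processing", e)
);
buffer.push("stream-start"); // buffered
buffer.push("caught-up"); // flushes "stream-start"
buffer.push("stream-delta"); // processed immediately
```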