137 changes: 8 additions & 129 deletions bun.lock

Large diffs are not rendered by default.

4 changes: 0 additions & 4 deletions src/browser/App.tsx
@@ -12,7 +12,6 @@ import { usePersistedState, updatePersistedState } from "./hooks/usePersistedSta
import { matchesKeybind, KEYBINDS } from "./utils/ui/keybinds";
import { useResumeManager } from "./hooks/useResumeManager";
import { useUnreadTracking } from "./hooks/useUnreadTracking";
import { useAutoCompactContinue } from "./hooks/useAutoCompactContinue";
import { useWorkspaceStoreRaw, useWorkspaceRecency } from "./stores/WorkspaceStore";
import { ChatInput } from "./components/ChatInput/index";
import type { ChatInputAPI } from "./components/ChatInput/types";
@@ -116,9 +115,6 @@ function AppInner() {
// Auto-resume interrupted streams on app startup and when failures occur
useResumeManager();

// Handle auto-continue after compaction (when user uses /compact -c)
useAutoCompactContinue();

// Sync selectedWorkspace with URL hash
useEffect(() => {
if (selectedWorkspace) {
2 changes: 1 addition & 1 deletion src/browser/api.ts
@@ -225,7 +225,7 @@ const webApi: IPCApi = {
invokeIPC(IPC_CHANNELS.WORKSPACE_RESUME_STREAM, workspaceId, options),
interruptStream: (workspaceId, options) =>
invokeIPC(IPC_CHANNELS.WORKSPACE_INTERRUPT_STREAM, workspaceId, options),
clearQueue: (workspaceId) => invokeIPC(IPC_CHANNELS.WORKSPACE_QUEUE_CLEAR, workspaceId),
clearQueue: (workspaceId) => invokeIPC(IPC_CHANNELS.WORKSPACE_CLEAR_QUEUE, workspaceId),
truncateHistory: (workspaceId, percentage) =>
invokeIPC(IPC_CHANNELS.WORKSPACE_TRUNCATE_HISTORY, workspaceId, percentage),
replaceChatHistory: (workspaceId, summaryMessage) =>
32 changes: 31 additions & 1 deletion src/browser/components/AIView.tsx
@@ -20,14 +20,22 @@ import { formatKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds";
import { useAutoScroll } from "@/browser/hooks/useAutoScroll";
import { usePersistedState } from "@/browser/hooks/usePersistedState";
import { useThinking } from "@/browser/contexts/ThinkingContext";
import { useWorkspaceState, useWorkspaceAggregator } from "@/browser/stores/WorkspaceStore";
import {
useWorkspaceState,
useWorkspaceAggregator,
useWorkspaceUsage,
} from "@/browser/stores/WorkspaceStore";
import { WorkspaceHeader } from "./WorkspaceHeader";
import { getModelName } from "@/common/utils/ai/models";
import type { DisplayedMessage } from "@/common/types/message";
import type { RuntimeConfig } from "@/common/types/runtime";
import { useAIViewKeybinds } from "@/browser/hooks/useAIViewKeybinds";
import { evictModelFromLRU } from "@/browser/hooks/useModelLRU";
import { QueuedMessage } from "./Messages/QueuedMessage";
import { CompactionWarning } from "./CompactionWarning";
import { shouldAutoCompact } from "@/browser/utils/compaction/autoCompactionCheck";
import { use1MContext } from "@/browser/hooks/use1MContext";
import { useAutoCompactionSettings } from "@/browser/hooks/useAutoCompactionSettings";

interface AIViewProps {
workspaceId: string;
@@ -71,6 +79,10 @@ const AIViewInner: React.FC<AIViewProps> = ({

const workspaceState = useWorkspaceState(workspaceId);
const aggregator = useWorkspaceAggregator(workspaceId);
const workspaceUsage = useWorkspaceUsage(workspaceId);
const [use1M] = use1MContext();
const { enabled: autoCompactionEnabled, threshold: autoCompactionThreshold } =
useAutoCompactionSettings(workspaceId);
const handledModelErrorsRef = useRef<Set<string>>(new Set());

useEffect(() => {
@@ -311,6 +323,17 @@ const AIViewInner: React.FC<AIViewProps> = ({
// Get active stream message ID for token counting
const activeStreamMessageId = aggregator.getActiveStreamMessageId();

const autoCompactionCheck = shouldAutoCompact(
workspaceUsage,
currentModel,
use1M,
autoCompactionEnabled,
autoCompactionThreshold / 100
);

// Show warning when: shouldShowWarning flag is true AND not currently compacting
const shouldShowCompactionWarning = !isCompacting && autoCompactionCheck.shouldShowWarning;

// Note: We intentionally do NOT reset autoRetry when streams start.
// If user pressed the interrupt key, autoRetry stays false until they manually retry.
// This makes state transitions explicit and predictable.
@@ -496,6 +519,12 @@ const AIViewInner: React.FC<AIViewProps> = ({
</button>
)}
</div>
{shouldShowCompactionWarning && (
<CompactionWarning
usagePercentage={autoCompactionCheck.usagePercentage}
thresholdPercentage={autoCompactionCheck.thresholdPercentage}
/>
)}
<ChatInput
variant="workspace"
workspaceId={workspaceId}
@@ -509,6 +538,7 @@ const AIViewInner: React.FC<AIViewProps> = ({
onEditLastUserMessage={() => void handleEditLastUserMessage()}
canInterrupt={canInterrupt}
onReady={handleChatInputReady}
autoCompactionCheck={autoCompactionCheck}
/>
</div>

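For review context: shouldAutoCompact and AutoCompactionCheckResult come from @/browser/utils/compaction/autoCompactionCheck, which is not included in this diff. The sketch below is only an inference from the call site above: the fields read in AIView (shouldShowWarning, usagePercentage, thresholdPercentage) are real, but the usage shape, the context-window figures, and the 10-point warning margin are assumptions.

// Not part of this diff: a minimal sketch of the helper as implied by its call site.
export interface AutoCompactionCheckResult {
  shouldShowWarning: boolean; // true once usage is close to the threshold
  usagePercentage: number; // current context usage, 0-100
  thresholdPercentage: number; // configured trigger threshold, 0-100
}

export function shouldAutoCompact(
  usage: { totalTokens: number } | undefined, // hypothetical shape of workspaceUsage
  model: string, // the real helper presumably keys a per-model context-window lookup off this
  use1M: boolean,
  enabled: boolean,
  thresholdFraction: number // e.g. autoCompactionThreshold / 100
): AutoCompactionCheckResult {
  const contextWindow = use1M ? 1_000_000 : 200_000; // assumed figures, not taken from this PR
  const usagePercentage = usage ? (usage.totalTokens / contextWindow) * 100 : 0;
  const thresholdPercentage = thresholdFraction * 100;
  // Warn when within ~10 points of the threshold; the exact margin is a guess.
  const shouldShowWarning = enabled && usagePercentage >= thresholdPercentage - 10;
  return { shouldShowWarning, usagePercentage, thresholdPercentage };
}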
123 changes: 91 additions & 32 deletions src/browser/components/ChatInput/index.tsx
@@ -30,6 +30,7 @@ import {
handleCompactCommand,
forkWorkspace,
prepareCompactionMessage,
executeCompaction,
type CommandHandlerContext,
} from "@/browser/utils/chatCommands";
import { CUSTOM_EVENTS } from "@/common/constants/events";
@@ -468,6 +469,32 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
// Workspace variant: full command handling + message send
if (variant !== "workspace") return; // Type guard

// Prepare image parts if any
const imageParts = imageAttachments.map((img, index) => {
// Validate before sending to help with debugging
if (!img.url || typeof img.url !== "string") {
console.error(
`Image attachment [${index}] has invalid url:`,
typeof img.url,
img.url?.slice(0, 50)
);
}
if (!img.url?.startsWith("data:")) {
console.error(`Image attachment [${index}] url is not a data URL:`, img.url?.slice(0, 100));
}
if (!img.mediaType || typeof img.mediaType !== "string") {
console.error(
`Image attachment [${index}] has invalid mediaType:`,
typeof img.mediaType,
img.mediaType
);
}
return {
url: img.url,
mediaType: img.mediaType,
};
});

try {
// Parse command
const parsed = parseCommand(messageText);
@@ -567,8 +594,10 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
const context: CommandHandlerContext = {
workspaceId: props.workspaceId,
sendMessageOptions,
imageParts,
editMessageId: editingMessage?.id,
setInput,
setImageAttachments,
setIsSending,
setToast,
onCancelEdit: props.onCancelEdit,
@@ -632,7 +661,9 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
const context: CommandHandlerContext = {
workspaceId: props.workspaceId,
sendMessageOptions,
imageParts: undefined, // /new doesn't use images
setInput,
setImageAttachments,
setIsSending,
setToast,
};
@@ -652,42 +683,70 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
}
}

// Regular message - send directly via API
setIsSending(true);

// Save current state for restoration on error
const previousImageAttachments = [...imageAttachments];

try {
// Prepare image parts if any
const imageParts = imageAttachments.map((img, index) => {
// Validate before sending to help with debugging
if (!img.url || typeof img.url !== "string") {
console.error(
`Image attachment [${index}] has invalid url:`,
typeof img.url,
img.url?.slice(0, 50)
);
}
if (!img.url?.startsWith("data:")) {
console.error(
`Image attachment [${index}] url is not a data URL:`,
img.url?.slice(0, 100)
);
}
if (!img.mediaType || typeof img.mediaType !== "string") {
console.error(
`Image attachment [${index}] has invalid mediaType:`,
typeof img.mediaType,
img.mediaType
);
// Auto-compaction check (workspace variant only)
// Check if we should auto-compact before sending this message
// Result is computed in parent (AIView) and passed down to avoid duplicate calculation
const shouldAutoCompact =
props.autoCompactionCheck &&
props.autoCompactionCheck.usagePercentage >= props.autoCompactionCheck.thresholdPercentage;
if (variant === "workspace" && !editingMessage && shouldAutoCompact) {
// Clear input immediately for responsive UX
setInput("");
setImageAttachments([]);
setIsSending(true);

try {
const result = await executeCompaction({
workspaceId: props.workspaceId,
continueMessage: {
text: messageText,
imageParts,
},
sendMessageOptions,
});

if (!result.success) {
// Restore on error
setInput(messageText);
setImageAttachments(previousImageAttachments);
setToast({
id: Date.now().toString(),
type: "error",
title: "Auto-Compaction Failed",
message: result.error ?? "Failed to start auto-compaction",
});
} else {
setToast({
id: Date.now().toString(),
type: "success",
message: `Context threshold reached - auto-compacting...`,
});
}
return {
url: img.url,
mediaType: img.mediaType,
};
});
} catch (error) {
// Restore on unexpected error
setInput(messageText);
setImageAttachments(previousImageAttachments);
setToast({
id: Date.now().toString(),
type: "error",
title: "Auto-Compaction Failed",
message:
error instanceof Error ? error.message : "Unexpected error during auto-compaction",
});
} finally {
setIsSending(false);
}

return; // Skip normal send
}

// Regular message - send directly via API
setIsSending(true);

try {
// When editing a /compact command, regenerate the actual summarization request
let actualMessageText = messageText;
let muxMetadata: MuxFrontendMetadata | undefined;
Expand All @@ -703,7 +762,7 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
} = prepareCompactionMessage({
workspaceId: props.workspaceId,
maxOutputTokens: parsed.maxOutputTokens,
continueMessage: parsed.continueMessage,
continueMessage: { text: parsed.continueMessage ?? "", imageParts },
model: parsed.model,
sendMessageOptions,
});
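executeCompaction is imported from @/browser/utils/chatCommands, but its body sits outside this diff. The contract implied by the call above is roughly the following; only the fields actually passed and read in ChatInput (workspaceId, continueMessage, sendMessageOptions, success, error) are confirmed, the rest is an assumption.

// Sketch of the assumed contract; the real implementation presumably wraps
// prepareCompactionMessage, sends the summarization request, and replays
// continueMessage once compaction completes.
interface ExecuteCompactionOptions {
  workspaceId: string;
  continueMessage?: { text: string; imageParts?: Array<{ url: string; mediaType: string }> };
  sendMessageOptions: unknown; // the same options object used for a regular send
}

interface ExecuteCompactionResult {
  success: boolean;
  error?: string;
}

declare function executeCompaction(opts: ExecuteCompactionOptions): Promise<ExecuteCompactionResult>;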
2 changes: 2 additions & 0 deletions src/browser/components/ChatInput/types.ts
@@ -1,5 +1,6 @@
import type { ImagePart } from "@/common/types/ipc";
import type { FrontendWorkspaceMetadata } from "@/common/types/workspace";
import type { AutoCompactionCheckResult } from "@/browser/utils/compaction/autoCompactionCheck";

export interface ChatInputAPI {
focus: () => void;
@@ -23,6 +24,7 @@ export interface ChatInputWorkspaceVariant {
canInterrupt?: boolean;
disabled?: boolean;
onReady?: (api: ChatInputAPI) => void;
autoCompactionCheck?: AutoCompactionCheckResult; // Computed in parent (AIView) to avoid duplicate calculation
}

// Creation variant: simplified for first message / workspace creation
36 changes: 36 additions & 0 deletions src/browser/components/CompactionWarning.tsx
@@ -0,0 +1,36 @@
import React from "react";

/**
* Warning banner shown when context usage is approaching the compaction threshold.
*
* Displays progressive warnings:
* - Below threshold: "Context left until Auto-Compact: X%" (where X = threshold - current)
* - At/above threshold: "⚠️ Context limit reached. Next message will trigger auto-compaction."
*
* Displayed above ChatInput when:
* - Token usage >= (threshold - 10%) of model's context window
* - Not currently compacting (user can still send messages)
*
* @param usagePercentage - Current token usage as percentage (0-100)
* @param thresholdPercentage - Auto-compaction trigger threshold (0-100, default 70)
*/
export const CompactionWarning: React.FC<{
usagePercentage: number;
thresholdPercentage: number;
}> = (props) => {
// At threshold or above, next message will trigger compaction
const willCompactNext = props.usagePercentage >= props.thresholdPercentage;

// Calculate remaining percentage until threshold
const remaining = props.thresholdPercentage - props.usagePercentage;

const message = willCompactNext
? "⚠️ Context limit reached. Next message will trigger auto-compaction."
: `Context left until Auto-Compact: ${Math.round(remaining)}%`;

return (
<div className="text-plan-mode bg-plan-mode/10 mx-4 my-4 rounded-sm px-4 py-3 text-center text-xs font-medium">
{message}
</div>
);
};
4 changes: 2 additions & 2 deletions src/browser/components/Context1MCheckbox.tsx
@@ -1,7 +1,7 @@
import React from "react";
import { use1MContext } from "@/browser/hooks/use1MContext";
import { supports1MContext } from "@/common/utils/ai/models";
import { TooltipWrapper, Tooltip } from "./Tooltip";
import { TooltipWrapper, Tooltip, HelpIndicator } from "./Tooltip";

interface Context1MCheckboxProps {
modelString: string;
@@ -22,7 +22,7 @@ export const Context1MCheckbox: React.FC<Context1MCheckboxProps> = ({ modelStrin
1M
</label>
<TooltipWrapper inline>
<span className="text-muted flex cursor-help items-center text-[10px] leading-none">?</span>
<HelpIndicator>?</HelpIndicator>
<Tooltip className="tooltip" align="center" width="auto">
Enable 1M token context window (beta feature for Claude Sonnet 4/4.5)
</Tooltip>
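HelpIndicator is a new export from ./Tooltip that does not appear in this diff. Judging by the inline span it replaces, it is presumably just a shared wrapper around the same styling, roughly:

import React from "react";

// Assumed implementation, mirroring the inline span removed above.
export const HelpIndicator: React.FC<{ children: React.ReactNode }> = ({ children }) => (
  <span className="text-muted flex cursor-help items-center text-[10px] leading-none">
    {children}
  </span>
);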