7 changes: 6 additions & 1 deletion src/utils/chatCommands.ts
@@ -135,7 +135,12 @@ export function prepareCompactionMessage(options: CompactionOptions): {
 } {
   const targetWords = options.maxOutputTokens ? Math.round(options.maxOutputTokens / 1.3) : 2000;
 
-  const messageText = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. Use approximately ${targetWords} words.`;
+  // Build compaction message with optional continue context
+  let messageText = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. Use approximately ${targetWords} words.`;
+
+  if (options.continueMessage) {
+    messageText += `\n\nThe user wants to continue with: ${options.continueMessage}`;
+  }
 
   // Handle model preference (sticky globally)
   const effectiveModel = resolveCompactionModel(options.model);
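The effect of this change, as a minimal standalone sketch (the CompactionOptions shape below is an assumption; only maxOutputTokens and continueMessage appear in the diff):

// Hypothetical, simplified mirror of the message-building step above.
interface CompactionOptionsSketch {
  maxOutputTokens?: number;
  continueMessage?: string;
}

function buildCompactionText(options: CompactionOptionsSketch): string {
  const targetWords = options.maxOutputTokens ? Math.round(options.maxOutputTokens / 1.3) : 2000;
  let messageText = `Summarize this conversation into a compact form for a new Assistant to continue helping the user. Use approximately ${targetWords} words.`;
  if (options.continueMessage) {
    messageText += `\n\nThe user wants to continue with: ${options.continueMessage}`;
  }
  return messageText;
}

// Example: buildCompactionText({ maxOutputTokens: 2600, continueMessage: "refactor the parser next" })
// yields "...Use approximately 2000 words." followed by the continue note.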
20 changes: 9 additions & 11 deletions src/utils/messages/compactionOptions.test.ts
@@ -31,22 +31,20 @@ describe("applyCompactionOverrides", () => {
     expect(result.model).toBe("anthropic:claude-haiku-4-5");
   });
 
-  it("sets thinking to off for Anthropic models", () => {
-    const compactData: CompactionRequestData = {
+  it("preserves workspace thinking level for all models", () => {
+    // Test Anthropic model
+    const anthropicData: CompactionRequestData = {
       model: "anthropic:claude-haiku-4-5",
     };
-    const result = applyCompactionOverrides(baseOptions, compactData);
-
-    expect(result.thinkingLevel).toBe("off");
-  });
+    const anthropicResult = applyCompactionOverrides(baseOptions, anthropicData);
+    expect(anthropicResult.thinkingLevel).toBe("medium");
 
-  it("preserves workspace thinking level for non-Anthropic models", () => {
-    const compactData: CompactionRequestData = {
+    // Test OpenAI model
+    const openaiData: CompactionRequestData = {
       model: "openai:gpt-5-pro",
     };
-    const result = applyCompactionOverrides(baseOptions, compactData);
-
-    expect(result.thinkingLevel).toBe("medium");
+    const openaiResult = applyCompactionOverrides(baseOptions, openaiData);
+    expect(openaiResult.thinkingLevel).toBe("medium");
   });
 
   it("applies maxOutputTokens override", () => {
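Both assertions expect "medium" because that is the workspace default carried by the shared baseOptions fixture defined earlier in the test file. A plausible shape for that fixture, assuming fields not shown in this diff:

// Assumed fixture - only thinkingLevel: "medium" is implied by the assertions above.
const baseOptions = {
  model: "openai:gpt-5-pro",
  thinkingLevel: "medium",
};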
7 changes: 2 additions & 5 deletions src/utils/messages/compactionOptions.ts
@@ -26,14 +26,11 @@ export function applyCompactionOverrides(
   // Use custom model if specified, otherwise use workspace default
   const compactionModel = compactData.model ?? baseOptions.model;
 
-  // Anthropic models don't support thinking, always use "off"
-  // Non-Anthropic models keep workspace default (backend will enforce policy)
-  const isAnthropic = compactionModel.startsWith("anthropic:");
-
   return {
     ...baseOptions,
     model: compactionModel,
-    thinkingLevel: isAnthropic ? "off" : baseOptions.thinkingLevel,
+    // Keep workspace default thinking level - all models support thinking now that tools are disabled
+    thinkingLevel: baseOptions.thinkingLevel,
     maxOutputTokens: compactData.maxOutputTokens,
     mode: "compact" as const,
     toolPolicy: [], // Disable all tools during compaction
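Resulting call-site behavior, sketched under the assumption that the option objects use only the field names visible in this hunk:

import { applyCompactionOverrides } from "./compactionOptions";

// Hypothetical workspace options and compaction request; field names beyond
// model / thinkingLevel / maxOutputTokens are assumptions.
const workspaceOptions = { model: "openai:gpt-5-pro", thinkingLevel: "medium" };
const compactData = { model: "anthropic:claude-haiku-4-5", maxOutputTokens: 2600 };

const result = applyCompactionOverrides(workspaceOptions, compactData);
// result.model         === "anthropic:claude-haiku-4-5"  (request override wins)
// result.thinkingLevel === "medium"                      (workspace default kept; no Anthropic special case)
// result.mode          === "compact"
// result.toolPolicy    is []                             (all tools disabled during compaction)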