Skip to content

Commit 221e81e

Browse files
committed
🤖 Refine IPC: Move token consumer calculation to frontend
Inverts ownership for better separation of concerns:

- Backend: just tokenization (single `tokens:countBulk` endpoint)
- Frontend: display logic (aggregation, percentages, sorting)

Benefits:

- Simpler API (one endpoint vs. a separate `stats.calculate`)
- Pure functions = easier testing (no IPC mocking needed)
- Frontend can batch all tokenization in one call
- Clear responsibilities: the backend tokenizes, the frontend calculates

Changes:

- Added `tokens:countBulk` IPC endpoint
- Created `consumerCalculator.ts` with pure calculation functions
- Updated `TokenConsumerBreakdown` to use the new endpoint
- Added comprehensive unit tests (12 test cases)
- Removed unused styled components from `CostsTab`

Stats calculation now happens entirely in the frontend using raw tokenization results from the backend. The old `stats.calculate` endpoint remains for backward compatibility but can be deprecated.
1 parent 69a462a commit 221e81e

File tree

8 files changed

+459
-29
lines changed

8 files changed

+459
-29
lines changed

src/components/ChatMetaSidebar/CostsTab.tsx

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,6 @@ const Section = styled.div`
2020
margin-bottom: 24px;
2121
`;
2222

23-
const SectionTitle = styled.h3<{ dimmed?: boolean }>`
24-
color: ${(props) => (props.dimmed ? "#999999" : "#cccccc")};
25-
font-size: 14px;
26-
font-weight: 600;
27-
margin: 0 0 12px 0;
28-
text-transform: uppercase;
29-
letter-spacing: 0.5px;
30-
`;
31-
3223
const ConsumerList = styled.div`
3324
display: flex;
3425
flex-direction: column;
@@ -116,11 +107,6 @@ const CachedSegment = styled.div<SegmentProps>`
116107
transition: width 0.3s ease;
117108
`;
118109

119-
const LoadingState = styled.div`
120-
color: #888888;
121-
font-style: italic;
122-
`;
123-
124110
const EmptyState = styled.div`
125111
color: #888888;
126112
text-align: center;
@@ -538,7 +524,7 @@ export const CostsTab: React.FC<CostsTabProps> = ({ workspaceId }) => {
538524
</Section>
539525
)}
540526

541-
<TokenConsumerBreakdown workspaceId={workspaceId} model={model} />
527+
<TokenConsumerBreakdown messages={messages} model={model} />
542528
</Container>
543529
);
544530
};

src/components/ChatMetaSidebar/TokenConsumerBreakdown.tsx

Lines changed: 50 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
import React, { useState, useEffect } from "react";
22
import styled from "@emotion/styled";
33
import type { ChatStats } from "@/types/chatStats";
4+
import type { CmuxMessage } from "@/types/message";
5+
import { prepareTokenization, calculateConsumers } from "@/utils/tokens/consumerCalculator";
46

57
const Section = styled.div`
68
margin-bottom: 24px;
@@ -102,12 +104,12 @@ const formatTokens = (tokens: number): string => {
102104
};
103105

104106
interface TokenConsumerBreakdownProps {
105-
workspaceId: string;
107+
messages: CmuxMessage[];
106108
model: string;
107109
}
108110

109111
export const TokenConsumerBreakdown: React.FC<TokenConsumerBreakdownProps> = ({
110-
workspaceId,
112+
messages,
111113
model,
112114
}) => {
113115
const [stats, setStats] = useState<ChatStats | null>(null);
@@ -116,27 +118,61 @@ export const TokenConsumerBreakdown: React.FC<TokenConsumerBreakdownProps> = ({
116118
useEffect(() => {
117119
let cancelled = false;
118120

119-
setIsCalculating(true);
120-
window.api.stats
121-
.calculate(workspaceId, model)
122-
.then((result) => {
123-
if (!cancelled && result) {
124-
setStats(result);
121+
async function calculate() {
122+
setIsCalculating(true);
123+
124+
try {
125+
// Prepare all text for tokenization (pure function)
126+
const { texts, consumerMap, toolDefinitions } = prepareTokenization(messages, model);
127+
128+
// Combine message texts + tool definition strings for bulk tokenization
129+
const allTexts = [...texts, ...Array.from(toolDefinitions.values())];
130+
131+
// Batch tokenize everything in one IPC call
132+
const tokenCounts = await window.api.tokens.countBulk(model, allTexts);
133+
134+
if (cancelled || !tokenCounts) {
135+
return; // Tokenizer not loaded or component unmounted
125136
}
126-
})
127-
.catch((error) => {
137+
138+
// Split results back into message tokens and tool definition tokens
139+
const messageTokens = tokenCounts.slice(0, texts.length);
140+
const toolDefCounts = new Map<string, number>();
141+
let defIndex = texts.length;
142+
for (const [toolName] of toolDefinitions) {
143+
toolDefCounts.set(toolName, tokenCounts[defIndex]);
144+
defIndex++;
145+
}
146+
147+
// Calculate consumers (pure function)
148+
const consumers = calculateConsumers(messageTokens, consumerMap, toolDefCounts);
149+
const totalTokens = consumers.reduce((sum, c) => sum + c.tokens, 0);
150+
151+
// Derive tokenizer name from model
152+
const tokenizerName = model.startsWith("anthropic:") ? "claude" : "o200k_base";
153+
154+
setStats({
155+
consumers,
156+
totalTokens,
157+
model,
158+
tokenizerName,
159+
usageHistory: [], // Not used in this component
160+
});
161+
} catch (error) {
128162
console.error(`[TokenConsumerBreakdown] Failed to calculate stats:`, error);
129-
})
130-
.finally(() => {
163+
} finally {
131164
if (!cancelled) {
132165
setIsCalculating(false);
133166
}
134-
});
167+
}
168+
}
169+
170+
void calculate();
135171

136172
return () => {
137173
cancelled = true;
138174
};
139-
}, [workspaceId, model]);
175+
}, [messages, model]);
140176

141177
if (isCalculating) {
142178
return (

src/constants/ipc-constants.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,9 @@ export const IPC_CHANNELS = {
4141
// Stats channels
4242
STATS_CALCULATE: "stats:calculate",
4343

44+
// Token channels
45+
TOKENS_COUNT_BULK: "tokens:countBulk",
46+
4447
// Dynamic channel prefixes
4548
WORKSPACE_CHAT_PREFIX: "workspace:chat:",
4649
WORKSPACE_METADATA: "workspace:metadata",

src/preload.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,10 @@ const api: IPCApi = {
114114
calculate: (workspaceId: string, model: string) =>
115115
ipcRenderer.invoke(IPC_CHANNELS.STATS_CALCULATE, workspaceId, model),
116116
},
117+
tokens: {
118+
countBulk: (model: string, texts: string[]) =>
119+
ipcRenderer.invoke(IPC_CHANNELS.TOKENS_COUNT_BULK, model, texts),
120+
},
117121
};
118122

119123
// Expose the API along with platform/versions

src/services/ipcMain.ts

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,7 @@ export class IpcMain {
141141
this.registerDialogHandlers(ipcMain);
142142
this.registerWindowHandlers(ipcMain);
143143
this.registerStatsHandlers(ipcMain);
144+
this.registerTokenHandlers(ipcMain);
144145
this.registerWorkspaceHandlers(ipcMain);
145146
this.registerProviderHandlers(ipcMain);
146147
this.registerProjectHandlers(ipcMain);
@@ -199,6 +200,25 @@ export class IpcMain {
199200
);
200201
}
201202

203+
/**
 * Registers the bulk token-counting IPC handler (`tokens:countBulk`).
 *
 * The renderer sends a model identifier plus an array of strings; the main
 * process resolves the matching tokenizer and replies with one token count
 * per input string, or `null` when tokenization is unavailable.
 */
private registerTokenHandlers(ipcMain: ElectronIpcMain): void {
  ipcMain.handle(
    IPC_CHANNELS.TOKENS_COUNT_BULK,
    async (_event, model: string, texts: string[]) => {
      try {
        // Dynamic import to lazy-load tokenizer (prevents ~3-4s startup freeze)
        /* eslint-disable no-restricted-syntax */
        const { getTokenizerForModel } = await import("@/utils/main/tokenizer");
        /* eslint-enable no-restricted-syntax */
        const tokenizer = getTokenizerForModel(model);

        // Count each text with the same tokenizer instance.
        const counts: number[] = [];
        for (const text of texts) {
          counts.push(tokenizer.countTokens(text));
        }
        return counts;
      } catch (error) {
        log.error(`Failed to count tokens for model ${model}:`, error);
        return null; // Tokenizer not loaded or error occurred
      }
    }
  );
}
221+
202222
private registerWorkspaceHandlers(ipcMain: ElectronIpcMain): void {
203223
ipcMain.handle(
204224
IPC_CHANNELS.WORKSPACE_CREATE,

src/types/ipc.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -234,4 +234,7 @@ export interface IPCApi {
234234
stats: {
235235
calculate(workspaceId: string, model: string): Promise<ChatStats | null>;
236236
};
237+
tokens: {
238+
countBulk(model: string, texts: string[]): Promise<number[] | null>;
239+
};
237240
}

0 commit comments

Comments (0)