Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/types/TokenMetrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ export interface TokenUsage {
}

export interface TranscriptLine {
message?: { usage?: TokenUsage };
message?: { usage?: TokenUsage; stop_reason?: string | null };
isSidechain?: boolean;
timestamp?: string;
isApiErrorMessage?: boolean;
Expand Down
203 changes: 203 additions & 0 deletions src/utils/__tests__/jsonl-metrics.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,14 @@ function makeUsageLine(params: {
cacheCreate?: number;
isSidechain?: boolean;
isApiErrorMessage?: boolean;
stopReason?: string | null;
}): string {
return JSON.stringify({
timestamp: params.timestamp,
isSidechain: params.isSidechain,
isApiErrorMessage: params.isApiErrorMessage,
message: {
stop_reason: params.stopReason,
usage: {
input_tokens: params.input,
output_tokens: params.output,
Expand Down Expand Up @@ -159,6 +161,207 @@ describe('jsonl transcript metrics', () => {
});
});

it('skips intermediate streaming entries and only counts final entries per API call', async () => {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(tmpDir);
    const transcriptPath = path.join(tmpDir, 'streaming.jsonl');

    // Two simulated API calls. Streaming writes intermediate rows with
    // stop_reason: null, then a finalized row ("tool_use" / "end_turn").
    const entryParams = [
        // Call 1: two partial rows, then the finalized row.
        { timestamp: '2026-01-01T10:00:00.000Z', input: 1, output: 30, cacheRead: 12000, cacheCreate: 11000, stopReason: null },
        { timestamp: '2026-01-01T10:00:00.000Z', input: 1, output: 30, cacheRead: 12000, cacheCreate: 11000, stopReason: null },
        { timestamp: '2026-01-01T10:00:01.000Z', input: 1, output: 150, cacheRead: 12000, cacheCreate: 11000, stopReason: 'tool_use' },
        // Call 2: one partial row, then the finalized row.
        { timestamp: '2026-01-01T10:00:02.000Z', input: 1, output: 25, cacheRead: 23000, cacheCreate: 500, stopReason: null },
        { timestamp: '2026-01-01T10:00:03.000Z', input: 1, output: 400, cacheRead: 23000, cacheCreate: 500, stopReason: 'end_turn' }
    ];
    fs.writeFileSync(transcriptPath, entryParams.map((p) => makeUsageLine(p)).join('\n'));

    const metrics = await getTokenMetrics(transcriptPath);

    // Only the two finalized rows contribute; the three partials are ignored.
    expect(metrics).toEqual({
        inputTokens: 2, // 1 + 1
        outputTokens: 550, // 150 + 400
        cachedTokens: 46500, // (12000 + 11000) + (23000 + 500)
        totalTokens: 47052, // 2 + 550 + 46500
        contextLength: 23501 // last main-chain final entry: 1 + 23000 + 500
    });
});

it('counts the latest in-progress streaming entry once when no finalized row exists yet', async () => {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(tmpDir);
    const transcriptPath = path.join(tmpDir, 'streaming-in-progress.jsonl');

    // One still-streaming API call: every row has stop_reason: null, and each
    // row supersedes the previous snapshot of the same response.
    const snapshots = [
        { timestamp: '2026-01-01T10:00:00.000Z', output: 40 },
        { timestamp: '2026-01-01T10:00:01.000Z', output: 90 },
        { timestamp: '2026-01-01T10:00:02.000Z', output: 140 }
    ];
    const serialized = snapshots
        .map((s) => makeUsageLine({ ...s, input: 4, cacheRead: 1000, cacheCreate: 200, stopReason: null }))
        .join('\n');
    fs.writeFileSync(transcriptPath, serialized);

    const metrics = await getTokenMetrics(transcriptPath);

    // Only the newest partial row (output: 140) should be counted, exactly once.
    expect(metrics).toEqual({
        inputTokens: 4,
        outputTokens: 140,
        cachedTokens: 1200, // 1000 + 200
        totalTokens: 1344, // 4 + 140 + 1200
        contextLength: 1204 // 4 + 1000 + 200
    });
});

it('counts finalized streaming entries plus the latest unfinished one during live updates', async () => {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(tmpDir);
    const transcriptPath = path.join(tmpDir, 'streaming-live-update.jsonl');

    // The first API call is finished (partial row then "end_turn"); the second
    // is still streaming, so only its newest partial row should be added.
    const finishedCallRows = [
        makeUsageLine({ timestamp: '2026-01-01T10:00:00.000Z', input: 2, output: 25, cacheRead: 100, cacheCreate: 50, stopReason: null }),
        makeUsageLine({ timestamp: '2026-01-01T10:00:01.000Z', input: 2, output: 80, cacheRead: 100, cacheCreate: 50, stopReason: 'end_turn' })
    ];
    const inFlightCallRows = [
        makeUsageLine({ timestamp: '2026-01-01T10:00:02.000Z', input: 3, output: 30, cacheRead: 200, cacheCreate: 25, stopReason: null }),
        makeUsageLine({ timestamp: '2026-01-01T10:00:03.000Z', input: 3, output: 120, cacheRead: 200, cacheCreate: 25, stopReason: null })
    ];
    fs.writeFileSync(transcriptPath, [...finishedCallRows, ...inFlightCallRows].join('\n'));

    const metrics = await getTokenMetrics(transcriptPath);

    // Finalized row (2 / 80 / 150 cached) + newest partial row (3 / 120 / 225 cached).
    expect(metrics).toEqual({
        inputTokens: 5, // 2 + 3
        outputTokens: 200, // 80 + 120
        cachedTokens: 375, // (100 + 50) + (200 + 25)
        totalTokens: 580, // 5 + 200 + 375
        contextLength: 228 // newest main-chain row: 3 + 200 + 25
    });
});

it('falls back to counting all entries when no stop_reason data is present', async () => {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(tmpDir);
    const transcriptPath = path.join(tmpDir, 'legacy.jsonl');

    // Legacy transcript rows carry no stop_reason key at all (JSON.stringify
    // drops the undefined field), so the streaming-aware filter must not
    // discard anything.
    const rows = [
        makeUsageLine({ timestamp: '2026-01-01T10:00:00.000Z', input: 100, output: 50, cacheRead: 20, cacheCreate: 10 }),
        makeUsageLine({ timestamp: '2026-01-01T11:00:00.000Z', input: 200, output: 80, cacheRead: 30, cacheCreate: 20 })
    ];
    fs.writeFileSync(transcriptPath, rows.join('\n'));

    const metrics = await getTokenMetrics(transcriptPath);

    // Every entry is summed; context comes from the newest entry.
    expect(metrics).toEqual({
        inputTokens: 300, // 100 + 200
        outputTokens: 130, // 50 + 80
        cachedTokens: 80, // (20 + 10) + (30 + 20)
        totalTokens: 510, // 300 + 130 + 80
        contextLength: 250 // 200 + 30 + 20
    });
});

it('returns zeroed token metrics when file is missing', async () => {
const metrics = await getTokenMetrics('/tmp/ccstatusline-jsonl-metrics-missing.jsonl');
expect(metrics).toEqual({
Expand Down
56 changes: 42 additions & 14 deletions src/utils/jsonl-metrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -162,26 +162,54 @@ export async function getTokenMetrics(transcriptPath: string): Promise<TokenMetr
let cachedTokens = 0;
let contextLength = 0;

// Parse each line and sum up token usage for totals
// Parse each line and sum up token usage for totals.
// Claude Code writes multiple JSONL entries per API call during streaming:
// intermediate entries have stop_reason: null, and the final entry has a
// string value like "end_turn" or "tool_use". For streaming-aware
// transcripts, count finalized entries plus the latest unfinished entry so
// live updates do not overcount duplicate partial rows. If the transcript
// format has no stop_reason field at all, fall back to counting all entries.
let mostRecentMainChainEntry: TranscriptLine | null = null;
let mostRecentTimestamp: Date | null = null;

const parsedEntries: TranscriptLine[] = [];
let hasStopReasonField = false;

for (const line of lines) {
const data = parseJsonlLine(line) as TranscriptLine | null;
if (data?.message?.usage) {
inputTokens += data.message.usage.input_tokens || 0;
outputTokens += data.message.usage.output_tokens || 0;
cachedTokens += data.message.usage.cache_read_input_tokens ?? 0;
cachedTokens += data.message.usage.cache_creation_input_tokens ?? 0;

// Track the most recent entry with isSidechain: false (or undefined, which defaults to main chain)
// Also skip API error messages (synthetic messages with 0 tokens)
if (data.isSidechain !== true && data.timestamp && !data.isApiErrorMessage) {
const entryTime = new Date(data.timestamp);
if (!mostRecentTimestamp || entryTime > mostRecentTimestamp) {
mostRecentTimestamp = entryTime;
mostRecentMainChainEntry = data;
}
parsedEntries.push(data);
if (Object.hasOwn(data.message, 'stop_reason')) {
hasStopReasonField = true;
}
}
}

const entriesToCount = hasStopReasonField
? parsedEntries.filter((data, index) => {
const stopReason = data.message?.stop_reason;
return Boolean(stopReason) || (stopReason === null && index === parsedEntries.length - 1);
})
: parsedEntries;

for (const data of entriesToCount) {
const usage = data.message?.usage;
if (!usage) {
continue;
}

inputTokens += usage.input_tokens || 0;
outputTokens += usage.output_tokens || 0;
cachedTokens += usage.cache_read_input_tokens ?? 0;
cachedTokens += usage.cache_creation_input_tokens ?? 0;

// Track the most recent entry with isSidechain: false (or undefined, which defaults to main chain)
// Also skip API error messages (synthetic messages with 0 tokens)
if (data.isSidechain !== true && data.timestamp && !data.isApiErrorMessage) {
const entryTime = new Date(data.timestamp);
if (!mostRecentTimestamp || entryTime > mostRecentTimestamp) {
mostRecentTimestamp = entryTime;
mostRecentMainChainEntry = data;
}
}
}
Expand Down