Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions website/bun.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions website/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"gpt-tokenizer": "^3.4.0",
"tailwindcss": "^4",
"typescript": "^5"
},
Expand Down
197 changes: 0 additions & 197 deletions website/scripts/compute-llm-call-tokens.mjs

This file was deleted.

67 changes: 67 additions & 0 deletions website/scripts/compute-llm-call-tokens.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
#!/usr/bin/env bun
/**
* One-shot helper: compute the GPT-5-family / o200k_base token count
* of the shared Responses API request bodies used by the sandbox page.
*
* bun scripts/compute-llm-call-tokens.ts
*
* `gpt-tokenizer` is a devDependency for this maintenance script only;
* it is not imported by the website runtime.
*/

import { encode } from "gpt-tokenizer/esm/model/gpt-5";
import {
buildFunctionCallOutput,
buildLlmRequest,
LLM_CALL_TOKENS,
TOOL_CALL_FLOWS,
type ToolFlow,
type ToolFlowKey,
} from "../src/lib/tool-call-comparison";

// Token count of a value's JSON serialization under the model's encoding.
function tokens(obj: unknown): number {
  return encode(JSON.stringify(obj)).length;
}

// Per-flow token counts: for every step of every flow, the size of the
// request body sent to the model (`in`) and of the function-call output
// echoed back (`out`).
const computed = {} as Record<ToolFlowKey, { in: number[]; out: number[] }>;
for (const [flowKey, flow] of Object.entries(TOOL_CALL_FLOWS) as [
  ToolFlowKey,
  ToolFlow,
][]) {
  computed[flowKey] = {
    in: flow.steps.map((_, stepIdx) => tokens(buildLlmRequest(flowKey, stepIdx))),
    out: flow.steps.map((_, stepIdx) =>
      tokens(buildFunctionCallOutput(flowKey, stepIdx)),
    ),
  };
}

/**
 * Print a per-step token table for one flow, followed by a totals line.
 *
 * @param label - heading for the section (printed as `== label ==`)
 * @param ins   - input-token counts, one entry per step
 * @param outs  - output-token counts, parallel to `ins`
 */
const fmt = (label: string, ins: number[], outs: number[]) => {
  console.log(`== ${label} ==`);
  let totIn = 0;
  let totOut = 0;
  // Single pass: print each step's row and accumulate the totals.
  for (let i = 0; i < ins.length; i++) {
    const tIn = ins[i];
    const tOut = outs[i];
    console.log(
      ` step ${i + 1}: in=${String(tIn).padStart(4)} tok out=${String(tOut).padStart(3)} tok`,
    );
    totIn += tIn;
    totOut += tOut;
  }
  console.log(
    ` TOTAL: in=${String(totIn).padStart(4)} tok out=${String(totOut).padStart(3)} tok sum=${totIn + totOut}`,
  );
};

// Emit one formatted section per flow.
const flowEntries = Object.entries(TOOL_CALL_FLOWS) as [ToolFlowKey, ToolFlow][];
for (const [flowKey, flow] of flowEntries) {
  // Blank separator line before every section except the "bash" one.
  if (flowKey !== "bash") console.log();
  const { in: stepIns, out: stepOuts } = computed[flowKey];
  fmt(`${flow.label} flow`, stepIns, stepOuts);
}

// Print a ready-to-paste snippet for the LLM_CALL_TOKENS constant in
// src/lib/tool-call-comparison.ts, so the hard-coded table can be refreshed.
console.log(
  `\nFor src/lib/tool-call-comparison.ts LLM_CALL_TOKENS:\n  bash: { in: [${computed.bash.in.join(", ")}], out: [${computed.bash.out.join(", ")}] },\n  goccia: { in: [${computed.goccia.in.join(", ")}], out: [${computed.goccia.out.join(", ")}] },`,
);

// Exit non-zero when the recorded LLM_CALL_TOKENS table no longer matches
// the freshly computed counts, so CI / callers can detect staleness.
const fresh = JSON.stringify(computed);
const recorded = JSON.stringify(LLM_CALL_TOKENS);
if (fresh !== recorded) {
  console.error("\nToken counts in LLM_CALL_TOKENS are stale.");
  process.exitCode = 1;
}
Loading
Loading