From f5b0ac500f92ce1efdbba29232c99829c8656dbf Mon Sep 17 00:00:00 2001 From: Rashid Razak Date: Mon, 11 May 2026 19:06:08 +0800 Subject: [PATCH 01/17] Replace osascript/JXA with native Mach-O agent and SMAppService to fix EDR detection - Add CodeBurnRefreshAgent target: native fire-and-exit binary posting com.codeburn.refresh notification - Rewrite installLaunchAgentIfNeeded(): plist ProgramArguments points to native binary, not osascript/JXA - Rewrite registerLoginItemIfNeeded(): uses SMAppService API instead of osascript/System Events - Add startSocketListener(): Unix domain socket for CLI-triggered menubar refresh - Add src/menubar-socket.ts: CLI-side notifyMenubar() helper wired into status --format menubar-json - Update Package.swift with new product/target, package-app.sh copies agent into bundle Resources - Add tests: plist content verification, login item guard, agent smoke test --- mac/Package.swift | 7 +- mac/Scripts/package-app.sh | 1 + mac/Sources/CodeBurnMenubar/CodeBurnApp.swift | 80 +++++++--- mac/Sources/CodeBurnRefreshAgent/main.swift | 8 + .../EDRDetectionFixTests.swift | 137 ++++++++++++++++++ src/cli.ts | 2 + src/menubar-socket.ts | 12 ++ 7 files changed, 225 insertions(+), 22 deletions(-) create mode 100644 mac/Sources/CodeBurnRefreshAgent/main.swift create mode 100644 mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift create mode 100644 src/menubar-socket.ts diff --git a/mac/Package.swift b/mac/Package.swift index 67509f27..ff8161e2 100644 --- a/mac/Package.swift +++ b/mac/Package.swift @@ -7,7 +7,8 @@ let package = Package( .macOS(.v14) ], products: [ - .executable(name: "CodeBurnMenubar", targets: ["CodeBurnMenubar"]) + .executable(name: "CodeBurnMenubar", targets: ["CodeBurnMenubar"]), + .executable(name: "CodeBurnRefreshAgent", targets: ["CodeBurnRefreshAgent"]) ], targets: [ .executableTarget( @@ -17,6 +18,10 @@ let package = Package( .enableUpcomingFeature("StrictConcurrency") ] ), + .executableTarget( + name: 
"CodeBurnRefreshAgent", + path: "Sources/CodeBurnRefreshAgent" + ), .testTarget( name: "CodeBurnMenubarTests", dependencies: ["CodeBurnMenubar"], diff --git a/mac/Scripts/package-app.sh b/mac/Scripts/package-app.sh index 5de94edd..d5c9c581 100755 --- a/mac/Scripts/package-app.sh +++ b/mac/Scripts/package-app.sh @@ -43,6 +43,7 @@ BUNDLE="${DIST_DIR}/${BUNDLE_NAME}" mkdir -p "${BUNDLE}/Contents/MacOS" mkdir -p "${BUNDLE}/Contents/Resources" cp "${BUILT_BINARY}" "${BUNDLE}/Contents/MacOS/${EXECUTABLE_NAME}" +cp "${BIN_PATH}/CodeBurnRefreshAgent" "${BUNDLE}/Contents/Resources/CodeBurnRefreshAgent" cat > "${BUNDLE}/Contents/Info.plist" < diff --git a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift index 5868258e..e273905c 100644 --- a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift +++ b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift @@ -1,6 +1,7 @@ import SwiftUI import AppKit import Observation +import ServiceManagement private let refreshIntervalSeconds: UInt64 = 30 private let nanosPerSecond: UInt64 = 1_000_000_000 @@ -76,6 +77,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { setupDistributedNotificationListener() installLaunchAgentIfNeeded() registerLoginItemIfNeeded() + startSocketListener() observeSubscriptionDisconnect() Task { await updateChecker.checkIfNeeded() } } @@ -140,6 +142,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { let agentName = "com.codeburn.refresh.plist" let home = fm.homeDirectoryForCurrentUser.path let destPath = "\(home)/Library/LaunchAgents/\(agentName)" + let agentPath = (Bundle.main.resourcePath ?? 
"") + "/CodeBurnRefreshAgent" let plist = """ @@ -150,11 +153,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { com.codeburn.refresh ProgramArguments - /usr/bin/osascript - -l - JavaScript - -e - ObjC.import("Foundation"); $.NSDistributedNotificationCenter.defaultCenter.postNotificationNameObjectUserInfoDeliverImmediately("com.codeburn.refresh", $(), $(), true) + \(agentPath) StartInterval 30 @@ -188,26 +187,65 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { } private func registerLoginItemIfNeeded() { - let key = "codeburn.loginItemRegistered" - guard !UserDefaults.standard.bool(forKey: key) else { return } + guard SMAppService.mainApp.status != .enabled else { return } + do { + try SMAppService.mainApp.register() + } catch { + NSLog("CodeBurn: Login item registration failed: \(error)") + } + } + + private func startSocketListener() { + let fm = FileManager.default + let home = fm.homeDirectoryForCurrentUser.path + let cacheDir = "\(home)/.cache/codeburn" + let socketPath = "\(cacheDir)/menubar.sock" - let appPath = Bundle.main.bundlePath - let script = "tell application \"System Events\" to make login item at end with properties {path:\"\(appPath)\", hidden:false}" + try? fm.createDirectory(atPath: cacheDir, withIntermediateDirectories: true) + if fm.fileExists(atPath: socketPath) { + try? 
fm.removeItem(atPath: socketPath) + } - let process = Process() - process.launchPath = "/usr/bin/osascript" - process.arguments = ["-e", script] - process.standardOutput = FileHandle.nullDevice - process.standardError = FileHandle.nullDevice + let socketFD = Darwin.socket(AF_UNIX, SOCK_STREAM, 0) + guard socketFD >= 0 else { + NSLog("CodeBurn: failed to create socket") + return + } - do { - try process.run() - process.waitUntilExit() - if process.terminationStatus == 0 { - UserDefaults.standard.set(true, forKey: key) + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + let socketPathC = (socketPath as NSString).fileSystemRepresentation + withUnsafeMutablePointer(to: &addr.sun_path.0) { ptr in + _ = strcpy(ptr, socketPathC) + } + + let addrSize = socklen_t(MemoryLayout.size) + let bindResult = withUnsafePointer(to: &addr) { + $0.withMemoryRebound(to: sockaddr.self, capacity: 1) { + Darwin.bind(socketFD, $0, addrSize) + } + } + guard bindResult == 0 else { + NSLog("CodeBurn: failed to bind socket at \(socketPath)") + Darwin.close(socketFD) + return + } + + Darwin.listen(socketFD, 5) + + DispatchQueue.global(qos: .background).async { [weak self, socketFD] in + while true { + let clientFD = Darwin.accept(socketFD, nil, nil) + guard clientFD >= 0 else { continue } + var buf: [UInt8] = Array(repeating: 0, count: 1024) + let n = Darwin.read(clientFD, &buf, buf.count) + if n > 0 { + DispatchQueue.main.async { [weak self] in + self?.forceRefresh() + } + } + Darwin.close(clientFD) } - } catch { - NSLog("CodeBurn: Login item registration failed: \(error)") } } diff --git a/mac/Sources/CodeBurnRefreshAgent/main.swift b/mac/Sources/CodeBurnRefreshAgent/main.swift new file mode 100644 index 00000000..56b8ad26 --- /dev/null +++ b/mac/Sources/CodeBurnRefreshAgent/main.swift @@ -0,0 +1,8 @@ +import Foundation + +DistributedNotificationCenter.default().postNotificationName( + .init("com.codeburn.refresh"), + object: nil, + userInfo: nil, + options: 
.deliverImmediately +) diff --git a/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift b/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift new file mode 100644 index 00000000..82dc757c --- /dev/null +++ b/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift @@ -0,0 +1,137 @@ +import Testing +import Foundation +import ServiceManagement + +private func makePlist(agentPath: String) -> String { + """ + + + + + Label + com.codeburn.refresh + ProgramArguments + + \(agentPath) + + StartInterval + 30 + RunAtLoad + + + +""" +} + +@Suite("LaunchAgent Plist") +struct LaunchAgentPlistTests { + @Test("Plist has correct ProgramArguments") + func programArgumentsIsSingleElementArray() throws { + let plistStr = makePlist(agentPath: "/path/to/CodeBurnRefreshAgent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let args = try #require(dict["ProgramArguments"] as? [String]) + #expect(args == ["/path/to/CodeBurnRefreshAgent"]) + } + + @Test("Plist has StartInterval of 30") + func startIntervalIs30() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let interval = try #require(dict["StartInterval"] as? Int) + #expect(interval == 30) + } + + @Test("Plist has RunAtLoad true") + func runAtLoadIsTrue() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let runAtLoad = try #require(dict["RunAtLoad"] as? 
Bool) + #expect(runAtLoad == true) + } + + @Test("Plist has correct Label") + func labelIsCorrect() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let label = try #require(dict["Label"] as? String) + #expect(label == "com.codeburn.refresh") + } + + @Test("Plist idempotency") + func idempotent() { + let a = makePlist(agentPath: "/same/path") + let b = makePlist(agentPath: "/same/path") + #expect(a == b) + } +} + +@Suite("Login Item Guard") +struct LoginItemGuardTests { + @Test("SMAppService.mainApp.status is accessible") + func mainAppStatusIsAccessible() { + // The guard in registerLoginItemIfNeeded(): + // guard SMAppService.mainApp.status != .enabled else { return } + // When status is .enabled, the function returns early (no registration). + // When status is .notRegistered / .requiresApproval, it proceeds to register. + let status = SMAppService.mainApp.status + // In a running app, status is .enabled, .notRegistered, or .requiresApproval. + // In a test context without an app bundle, it may be .notFound (macOS 14+). + let known: Bool = status == .enabled || status == .notRegistered + || status == .requiresApproval || status == .notFound + #expect(known) + } +} + +@Test("CodeBurnRefreshAgent builds and runs successfully") +func agentBuildsAndRuns() throws { + let packageDir = URL(fileURLWithPath: #filePath) + .deletingLastPathComponent() + .deletingLastPathComponent() + .deletingLastPathComponent() + + let scratchDir = FileManager.default.temporaryDirectory + .appendingPathComponent("codeburn-smoke-test-build") + try? 
FileManager.default.removeItem(at: scratchDir) + + let build = Process() + build.launchPath = "/usr/bin/env" + build.arguments = [ + "swift", "build", "--product", "CodeBurnRefreshAgent", + "--scratch-path", scratchDir.path + ] + build.currentDirectoryURL = packageDir + try build.run() + build.waitUntilExit() + #expect(build.terminationStatus == 0, "Build failed") + + let showPath = Process() + let pipe = Pipe() + showPath.launchPath = "/usr/bin/env" + showPath.arguments = [ + "swift", "build", "--product", "CodeBurnRefreshAgent", + "--scratch-path", scratchDir.path, "--show-bin-path" + ] + showPath.currentDirectoryURL = packageDir + showPath.standardOutput = pipe + try showPath.run() + showPath.waitUntilExit() + + let binPathData = pipe.fileHandleForReading.readDataToEndOfFile() + let binPath = String(data: binPathData, encoding: .utf8)? + .trimmingCharacters(in: .whitespacesAndNewlines) ?? "" + let binaryURL = URL(fileURLWithPath: binPath).appendingPathComponent("CodeBurnRefreshAgent") + + let agent = Process() + agent.launchPath = binaryURL.path + try agent.run() + agent.waitUntilExit() + #expect(agent.terminationStatus == 0, "Agent exited with non-zero status") +} diff --git a/src/cli.ts b/src/cli.ts index 4ebfe337..abc2b0bc 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,5 +1,6 @@ import { Command } from 'commander' import { installMenubarApp } from './menubar-installer.js' +import { notifyMenubar } from './menubar-socket.js' import { exportCsv, exportJson, type PeriodExport } from './export.js' import { loadPricing, setModelAliases } from './models.js' import { parseAllSessions, filterProjectsByName } from './parser.js' @@ -515,6 +516,7 @@ program const optimize = opts.optimize === false ? 
null : await scanAndDetect(scanProjects, scanRange) console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory))) + notifyMenubar() return } diff --git a/src/menubar-socket.ts b/src/menubar-socket.ts new file mode 100644 index 00000000..fd874d57 --- /dev/null +++ b/src/menubar-socket.ts @@ -0,0 +1,12 @@ +import { connect } from 'node:net' +import { homedir } from 'node:os' +import { join } from 'node:path' + +const SOCKET_PATH = join(homedir(), '.cache', 'codeburn', 'menubar.sock') + +export function notifyMenubar(): void { + const sock = connect(SOCKET_PATH) + sock.on('error', () => {}) + sock.write('refresh\n') + sock.end() +} From 1149ab6e43c99abe41ee32e5a7a8297b38ed704a Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 10:57:02 -0700 Subject: [PATCH 02/17] Fix menubar wake recovery and release asset selection --- .github/workflows/release-menubar.yml | 8 ++-- mac/Sources/CodeBurnMenubar/CodeBurnApp.swift | 26 ++++++++++--- .../CodeBurnMenubar/Data/UpdateChecker.swift | 2 +- src/menubar-installer.ts | 31 +++++++++------- tests/menubar-installer.test.ts | 37 +++++++++++++++++++ 5 files changed, 81 insertions(+), 23 deletions(-) create mode 100644 tests/menubar-installer.test.ts diff --git a/.github/workflows/release-menubar.yml b/.github/workflows/release-menubar.yml index 990d4731..b2cf9497 100644 --- a/.github/workflows/release-menubar.yml +++ b/.github/workflows/release-menubar.yml @@ -45,7 +45,9 @@ jobs: uses: actions/upload-artifact@v4 with: name: CodeBurnMenubar-${{ steps.version.outputs.value }} - path: mac/.build/dist/CodeBurnMenubar-*.zip + path: | + mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip + mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip.sha256 if-no-files-found: error - name: Create / update GitHub Release @@ -66,6 +68,6 @@ jobs: and macOS shows "cannot verify developer", right-click the app in Finder and pick Open to whitelist it once. 
files: | - mac/.build/dist/CodeBurnMenubar-*.zip - mac/.build/dist/CodeBurnMenubar-*.zip.sha256 + mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip + mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip.sha256 fail_on_unmatched_files: true diff --git a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift index 5868258e..704c7fd9 100644 --- a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift +++ b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift @@ -95,6 +95,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { self?.forceRefreshTask = nil self?.forceRefreshStartedAt = nil self?.forceRefreshGeneration &+= 1 + self?.store.resetLoadingState() self?.refreshLoopTask?.cancel() self?.refreshLoopTask = nil } @@ -110,9 +111,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { queue: .main ) { [weak self] _ in Task { @MainActor in - self?.store.resetLoadingState() - self?.forceRefresh() - if self?.refreshLoopTask == nil { self?.startRefreshLoop() } + self?.recoverRefreshPipelineAfterInterruption(resetLoading: true) } } @@ -121,7 +120,9 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { object: nil, queue: .main ) { [weak self] _ in - Task { @MainActor in self?.forceRefresh() } + Task { @MainActor in + self?.recoverRefreshPipelineAfterInterruption(resetLoading: true) + } } } @@ -131,8 +132,22 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { object: nil, queue: .main ) { [weak self] _ in - Task { @MainActor in self?.forceRefresh() } + Task { @MainActor in + self?.recoverRefreshPipelineAfterInterruption(resetLoading: false) + } + } + } + + private func recoverRefreshPipelineAfterInterruption(resetLoading: Bool) { + if resetLoading { + store.resetLoadingState() + } else { + _ = store.clearStaleLoadingIfNeeded() + } + if refreshLoopTask == nil { + startRefreshLoop() } + forceRefresh() } 
private func installLaunchAgentIfNeeded() { @@ -232,6 +247,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { private func forceRefresh() { let now = Date() _ = clearStaleForceRefreshIfNeeded(now: now) + guard forceRefreshTask == nil else { return } guard now.timeIntervalSince(lastRefreshTime) > 5 else { return } lastRefreshTime = now forceRefreshStartedAt = now diff --git a/mac/Sources/CodeBurnMenubar/Data/UpdateChecker.swift b/mac/Sources/CodeBurnMenubar/Data/UpdateChecker.swift index 6ad0a900..ce575b64 100644 --- a/mac/Sources/CodeBurnMenubar/Data/UpdateChecker.swift +++ b/mac/Sources/CodeBurnMenubar/Data/UpdateChecker.swift @@ -46,7 +46,7 @@ final class UpdateChecker { let (data, _) = try await URLSession.shared.data(for: request) let release = try JSONDecoder().decode(GitHubRelease.self, from: data) guard let asset = release.assets.first(where: { - $0.name.hasPrefix("CodeBurnMenubar-") && $0.name.hasSuffix(".zip") + $0.name.hasPrefix("CodeBurnMenubar-v") && $0.name.hasSuffix(".zip") }) else { return } let version = asset.name diff --git a/src/menubar-installer.ts b/src/menubar-installer.ts index 397a81c7..b176deed 100644 --- a/src/menubar-installer.ts +++ b/src/menubar-installer.ts @@ -11,17 +11,28 @@ import { Readable } from 'node:stream' /// newest tagged release; we filter its assets list for our zipped .app bundle. 
const RELEASE_API = 'https://api.github.com/repos/getagentseal/codeburn/releases/latest' const APP_BUNDLE_NAME = 'CodeBurnMenubar.app' -const ASSET_PATTERN = /^CodeBurnMenubar-.*\.zip$/ -const CHECKSUM_PATTERN = /^CodeBurnMenubar-.*\.zip\.sha256$/ +const VERSIONED_ASSET_PATTERN = /^CodeBurnMenubar-v.+\.zip$/ const APP_PROCESS_NAME = 'CodeBurnMenubar' const SUPPORTED_OS = 'darwin' const MIN_MACOS_MAJOR = 14 export type InstallResult = { installedPath: string; launched: boolean } -type ReleaseAsset = { name: string; browser_download_url: string } -type ReleaseResponse = { tag_name: string; assets: ReleaseAsset[] } -type ResolvedAssets = { zip: ReleaseAsset; checksum: ReleaseAsset | null } +export type ReleaseAsset = { name: string; browser_download_url: string } +export type ReleaseResponse = { tag_name: string; assets: ReleaseAsset[] } +export type ResolvedAssets = { zip: ReleaseAsset; checksum: ReleaseAsset | null } + +export function resolveMenubarReleaseAssets(release: ReleaseResponse): ResolvedAssets { + const zip = release.assets.find(a => VERSIONED_ASSET_PATTERN.test(a.name)) + if (!zip) { + throw new Error( + `No ${APP_BUNDLE_NAME} versioned zip found in release ${release.tag_name}. ` + + `Check https://github.com/getagentseal/codeburn/releases.` + ) + } + const checksum = release.assets.find(a => a.name === `${zip.name}.sha256`) ?? null + return { zip, checksum } +} function userApplicationsDir(): string { return join(homedir(), 'Applications') @@ -71,15 +82,7 @@ async function fetchLatestReleaseAssets(): Promise { throw new Error(`GitHub release lookup failed: HTTP ${response.status}`) } const body = await response.json() as ReleaseResponse - const zip = body.assets.find(a => ASSET_PATTERN.test(a.name)) - if (!zip) { - throw new Error( - `No ${APP_BUNDLE_NAME} zip found in release ${body.tag_name}. ` + - `Check https://github.com/getagentseal/codeburn/releases.` - ) - } - const checksum = body.assets.find(a => CHECKSUM_PATTERN.test(a.name)) ?? 
null - return { zip, checksum } + return resolveMenubarReleaseAssets(body) } async function verifyChecksum(archivePath: string, checksumUrl: string): Promise { diff --git a/tests/menubar-installer.test.ts b/tests/menubar-installer.test.ts new file mode 100644 index 00000000..44f73cc9 --- /dev/null +++ b/tests/menubar-installer.test.ts @@ -0,0 +1,37 @@ +import { describe, expect, it } from 'vitest' +import { resolveMenubarReleaseAssets, type ReleaseResponse } from '../src/menubar-installer.js' + +function asset(name: string) { + return { name, browser_download_url: `https://example.test/${name}` } +} + +describe('resolveMenubarReleaseAssets', () => { + it('ignores dev zips and pairs the checksum with the versioned zip', () => { + const release: ReleaseResponse = { + tag_name: 'mac-v0.9.8', + assets: [ + asset('CodeBurnMenubar-dev.zip'), + asset('CodeBurnMenubar-dev.zip.sha256'), + asset('CodeBurnMenubar-v0.9.8.zip'), + asset('CodeBurnMenubar-v0.9.8.zip.sha256'), + ], + } + + const resolved = resolveMenubarReleaseAssets(release) + + expect(resolved.zip.name).toBe('CodeBurnMenubar-v0.9.8.zip') + expect(resolved.checksum?.name).toBe('CodeBurnMenubar-v0.9.8.zip.sha256') + }) + + it('fails when a release only contains dev assets', () => { + const release: ReleaseResponse = { + tag_name: 'mac-v0.9.8', + assets: [ + asset('CodeBurnMenubar-dev.zip'), + asset('CodeBurnMenubar-dev.zip.sha256'), + ], + } + + expect(() => resolveMenubarReleaseAssets(release)).toThrow(/versioned zip/) + }) +}) From ce0e1eb116fec1a2fece032f8b636ad7ca4ddd37 Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 11:05:50 -0700 Subject: [PATCH 03/17] Make menubar refresh now reset stale state --- mac/Sources/CodeBurnMenubar/AppStore.swift | 11 ++++++ mac/Sources/CodeBurnMenubar/CodeBurnApp.swift | 36 +++++++++++++++++-- 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/mac/Sources/CodeBurnMenubar/AppStore.swift b/mac/Sources/CodeBurnMenubar/AppStore.swift index 
00b27e8b..38d370a9 100644 --- a/mac/Sources/CodeBurnMenubar/AppStore.swift +++ b/mac/Sources/CodeBurnMenubar/AppStore.swift @@ -140,6 +140,17 @@ final class AppStore { inFlightKeys.removeAll() } + func resetRefreshState(clearCache: Bool = false) { + switchTask?.cancel() + switchTask = nil + resetLoadingState() + attemptedKeys.removeAll() + lastErrorByKey.removeAll() + if clearCache { + cache.removeAll() + } + } + private let loadingWatchdogSeconds: TimeInterval = 60 @discardableResult diff --git a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift index 704c7fd9..29ad39cd 100644 --- a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift +++ b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift @@ -39,6 +39,8 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { private var forceRefreshTask: Task? private var forceRefreshStartedAt: Date? private var forceRefreshGeneration: UInt64 = 0 + private var manualRefreshTask: Task? + private var manualRefreshGeneration: UInt64 = 0 func applicationWillFinishLaunching(_ notification: Notification) { // Set accessory policy before the app's focus chain forms. 
On macOS Tahoe @@ -95,6 +97,9 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { self?.forceRefreshTask = nil self?.forceRefreshStartedAt = nil self?.forceRefreshGeneration &+= 1 + self?.manualRefreshTask?.cancel() + self?.manualRefreshTask = nil + self?.manualRefreshGeneration &+= 1 self?.store.resetLoadingState() self?.refreshLoopTask?.cancel() self?.refreshLoopTask = nil @@ -349,17 +354,44 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { @MainActor func refreshSubscriptionNow() { - Task { [weak self] in + manualRefreshTask?.cancel() + manualRefreshGeneration &+= 1 + let generation = manualRefreshGeneration + forceRefreshTask?.cancel() + forceRefreshTask = nil + forceRefreshStartedAt = nil + forceRefreshGeneration &+= 1 + pendingRefreshWork?.cancel() + pendingRefreshWork = nil + refreshLoopTask?.cancel() + refreshLoopTask = nil + store.resetRefreshState(clearCache: true) + lastRefreshTime = .distantPast + refreshStatusButton() + + manualRefreshTask = Task { [weak self] in guard let self else { return } // "Refresh Now" should refresh the menubar payload AND every - // connected provider's live quota — the user's intent is "make + // connected provider's live quota. The user's intent is "make // this match reality right now." 
+ let needsTodayTotal = self.store.selectedPeriod != .today || self.store.selectedProvider != .all async let payload: Void = self.store.refresh(includeOptimize: false, force: true, showLoading: true) async let claude: Bool = self.store.refreshSubscriptionReportingSuccess() async let codex: Bool = self.store.refreshCodexReportingSuccess() + if needsTodayTotal { + await self.store.refreshQuietly(period: .today) + } _ = await payload + guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return } + self.lastRefreshTime = Date() + self.refreshStatusButton() if await claude { self.lastSubscriptionRefreshAt = Date() } if await codex { self.lastCodexRefreshAt = Date() } + guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return } + self.manualRefreshTask = nil + if self.refreshLoopTask == nil { + self.startRefreshLoop() + } } } From 33649e0a65cdad469c7c85167a5cf797b5406568 Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 11:09:20 -0700 Subject: [PATCH 04/17] Refresh live quota progress from menubar --- mac/Sources/CodeBurnMenubar/CodeBurnApp.swift | 77 +++++++++++++------ 1 file changed, 53 insertions(+), 24 deletions(-) diff --git a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift index 29ad39cd..a58d0446 100644 --- a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift +++ b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift @@ -6,6 +6,7 @@ private let refreshIntervalSeconds: UInt64 = 30 private let nanosPerSecond: UInt64 = 1_000_000_000 private let refreshIntervalNanos: UInt64 = refreshIntervalSeconds * nanosPerSecond private let forceRefreshWatchdogSeconds: TimeInterval = 90 +private let interactiveQuotaRefreshFloorSeconds: TimeInterval = 30 private let statusItemWidth: CGFloat = NSStatusItem.variableLength private let popoverWidth: CGFloat = 360 private let popoverHeight: CGFloat = 660 @@ -262,7 +263,8 @@ final class AppDelegate: NSObject, NSApplicationDelegate, 
NSPopoverDelegate { forceRefreshTask = Task { async let main: Void = store.refresh(includeOptimize: false, force: true, showLoading: true) async let today: Void = store.refreshQuietly(period: .today) - _ = await (main, today) + async let quotas: Bool = refreshLiveQuotaProgressIfDue() + _ = await (main, today, quotas) refreshStatusButton() await MainActor.run { [weak self] in guard let self, self.forceRefreshGeneration == generation else { return } @@ -296,6 +298,51 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { } fileprivate var lastSubscriptionRefreshAt: Date? + fileprivate var lastCodexRefreshAt: Date? + + @discardableResult + private func refreshLiveQuotaProgressIfDue(force: Bool = false) async -> Bool { + let cadence = SubscriptionRefreshCadence.current + if !force && cadence == .manual { return false } + + let now = Date() + let threshold = force ? 0 : TimeInterval(cadence.rawValue) + let shouldRefreshClaude = force || now.timeIntervalSince(lastSubscriptionRefreshAt ?? .distantPast) >= threshold + let shouldRefreshCodex = force || now.timeIntervalSince(lastCodexRefreshAt ?? .distantPast) >= threshold + guard shouldRefreshClaude || shouldRefreshCodex else { return false } + + switch (shouldRefreshClaude, shouldRefreshCodex) { + case (true, true): + async let claude = store.refreshSubscriptionReportingSuccess() + async let codex = store.refreshCodexReportingSuccess() + if await claude { lastSubscriptionRefreshAt = Date() } + if await codex { lastCodexRefreshAt = Date() } + case (true, false): + if await store.refreshSubscriptionReportingSuccess() { + lastSubscriptionRefreshAt = Date() + } + case (false, true): + if await store.refreshCodexReportingSuccess() { + lastCodexRefreshAt = Date() + } + case (false, false): + break + } + return true + } + + private func refreshLiveQuotaProgressForPopoverOpen() { + let now = Date() + let claudeElapsed = now.timeIntervalSince(lastSubscriptionRefreshAt ?? 
.distantPast) + let codexElapsed = now.timeIntervalSince(lastCodexRefreshAt ?? .distantPast) + guard claudeElapsed >= interactiveQuotaRefreshFloorSeconds || + codexElapsed >= interactiveQuotaRefreshFloorSeconds else { return } + + Task { [weak self] in + guard let self else { return } + _ = await self.refreshLiveQuotaProgressIfDue(force: true) + } + } private func startRefreshLoop() { refreshLoopTask?.cancel() @@ -303,10 +350,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { // Provider refreshes only run when the user has explicitly connected. // Each refresh is a no-op until its corresponding bootstrap flag is set. if let self { - async let claude = self.store.refreshSubscriptionReportingSuccess() - async let codex = self.store.refreshCodexReportingSuccess() - if await claude { self.lastSubscriptionRefreshAt = Date() } - if await codex { self.lastCodexRefreshAt = Date() } + await self.refreshLiveQuotaProgressIfDue(force: true) } while !Task.isCancelled { guard let self else { return } @@ -332,26 +376,12 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { // (not last attempt) so an intermittent failure doesn't reset // the timer. Each provider has its own anchor so a Codex 429 // doesn't delay a due Claude refresh. - let cadence = SubscriptionRefreshCadence.current - if cadence != .manual { - let claudeElapsed = Date().timeIntervalSince(self.lastSubscriptionRefreshAt ?? .distantPast) - if claudeElapsed >= TimeInterval(cadence.rawValue) { - let succeeded = await self.store.refreshSubscriptionReportingSuccess() - if succeeded { self.lastSubscriptionRefreshAt = Date() } - } - let codexElapsed = Date().timeIntervalSince(self.lastCodexRefreshAt ?? .distantPast) - if codexElapsed >= TimeInterval(cadence.rawValue) { - let succeeded = await self.store.refreshCodexReportingSuccess() - if succeeded { self.lastCodexRefreshAt = Date() } - } - } + await self.refreshLiveQuotaProgressIfDue() try? 
await Task.sleep(nanoseconds: refreshIntervalNanos) } } } - fileprivate var lastCodexRefreshAt: Date? - @MainActor func refreshSubscriptionNow() { manualRefreshTask?.cancel() @@ -376,8 +406,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { // this match reality right now." let needsTodayTotal = self.store.selectedPeriod != .today || self.store.selectedProvider != .all async let payload: Void = self.store.refresh(includeOptimize: false, force: true, showLoading: true) - async let claude: Bool = self.store.refreshSubscriptionReportingSuccess() - async let codex: Bool = self.store.refreshCodexReportingSuccess() + async let quotas: Bool = self.refreshLiveQuotaProgressIfDue(force: true) if needsTodayTotal { await self.store.refreshQuietly(period: .today) } @@ -385,8 +414,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return } self.lastRefreshTime = Date() self.refreshStatusButton() - if await claude { self.lastSubscriptionRefreshAt = Date() } - if await codex { self.lastCodexRefreshAt = Date() } + _ = await quotas guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return } self.manualRefreshTask = nil if self.refreshLoopTask == nil { @@ -589,6 +617,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { window.collectionBehavior.insert(.canJoinAllSpaces) window.makeKeyAndOrderFront(nil) } + refreshLiveQuotaProgressForPopoverOpen() } } From 469d95631203687442737d53a50710e60833fbf8 Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 11:11:37 -0700 Subject: [PATCH 05/17] Preserve menubar bundle seal during install --- mac/Scripts/package-app.sh | 2 +- src/menubar-installer.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mac/Scripts/package-app.sh b/mac/Scripts/package-app.sh index 5de94edd..ee0dc065 100755 --- a/mac/Scripts/package-app.sh +++ 
b/mac/Scripts/package-app.sh @@ -96,7 +96,7 @@ codesign --verify --deep --strict "${BUNDLE}" 2>/dev/null || echo " (signature ZIP_NAME="CodeBurnMenubar-${VERSION}.zip" ZIP_PATH="${DIST_DIR}/${ZIP_NAME}" echo "▸ Packaging ${ZIP_NAME}..." -(cd "${DIST_DIR}" && /usr/bin/ditto -c -k --keepParent "${BUNDLE_NAME}" "${ZIP_NAME}") +(cd "${DIST_DIR}" && COPYFILE_DISABLE=1 /usr/bin/ditto -c -k --norsrc --keepParent "${BUNDLE_NAME}" "${ZIP_NAME}") CHECKSUM_NAME="${ZIP_NAME}.sha256" CHECKSUM_PATH="${DIST_DIR}/${CHECKSUM_NAME}" diff --git a/src/menubar-installer.ts b/src/menubar-installer.ts index b176deed..051c12cd 100644 --- a/src/menubar-installer.ts +++ b/src/menubar-installer.ts @@ -182,7 +182,7 @@ export async function installMenubarApp(options: { force?: boolean } = {}): Prom } console.log('Unpacking...') - await runCommand('/usr/bin/unzip', ['-q', archivePath, '-d', stagingDir]) + await runCommand('/usr/bin/ditto', ['-x', '-k', archivePath, stagingDir]) const unpackedApp = join(stagingDir, APP_BUNDLE_NAME) if (!(await exists(unpackedApp))) { From 4737bfb1fa8b4513263f112a2c0c13e48245d302 Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 20:03:27 -0700 Subject: [PATCH 06/17] Contribution rules: require real-data testing for new providers, one PR at a time --- .github/PULL_REQUEST_TEMPLATE.md | 18 ++++++++++++++++++ CONTRIBUTING.md | 17 +++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..9af748a0 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,18 @@ +## Summary + + + +## Testing + +- [ ] I have tested this locally against real data (not just unit tests) +- [ ] `npm test` passes +- [ ] `npm run build` succeeds + +### For new providers only: + +- [ ] I installed the tool and generated real sessions by using it +- [ ] `npm run dev -- today` shows correct costs and session counts 
for this provider +- [ ] `npm run dev -- models --provider <provider>` shows correct model names and pricing +- [ ] Screenshot or terminal output attached below proving it works with real data + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 84b21f40..aebe0f23 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -84,6 +84,23 @@ The `.github/workflows/block-claude-coauthor.yml` workflow rejects any PR whose If a flagged PR rejects on this check, the workflow prints the exact rebase command to fix it. +## Before You Start + +**Comment on the issue first.** Before writing code for a feature or new provider, leave a comment on the relevant issue saying what you plan to do. Wait for a maintainer to confirm the approach. Unsolicited PRs that duplicate work already in progress or take an incompatible approach will be closed. + +**One PR at a time.** We will not review a second PR from you until the first is merged or closed. This keeps the review queue manageable and ensures each contribution gets proper attention. + +## Adding a New Provider + +New providers have the highest bar because broken parsing silently produces wrong data for users. Before opening a PR: + +1. **Install the tool and use it.** Generate real sessions by actually coding with the provider. We do this ourselves for every provider we ship. +2. **Test against real data.** Run `npm run dev -- today` and `npm run dev -- models` with your real sessions and confirm the output looks correct — costs are non-zero, model names resolve, session counts match what you see in the tool. +3. **Include proof in the PR.** Attach a screenshot or terminal output showing codeburn correctly parsing your real sessions. PRs for new providers without evidence of local testing will not be reviewed. +4. **Do not rely on AI-generated guesses about storage paths or schemas.** Tools change their data formats between versions. The only way to know the current schema is to install the tool and inspect the actual files on disk. 
+ +PRs that add a provider based solely on online documentation or AI-generated code, without evidence of testing against real data, will be closed. + ## Pull Requests 1. Fork or branch from `main`. From 03e22ecb80fbaaeb4e6f824b5724fc9a40233a7a Mon Sep 17 00:00:00 2001 From: AgentSeal Date: Mon, 11 May 2026 20:54:13 -0700 Subject: [PATCH 07/17] Add IBM Bob provider with workspace extraction (#316) * Add IBM Bob provider * Add workspace extraction for Cline-family providers Extract project name from workspace directory in api_conversation_history.json so sessions show actual folder names instead of the provider display name. Thread projectPath through ParsedProviderCall to avoid unsanitizePath mangling hyphenated folder names. --------- Co-authored-by: ozymandiashh <234437643+ozymandiashh@users.noreply.github.com> Co-authored-by: iamtoruk --- CHANGELOG.md | 11 ++ README.md | 7 +- assets/providers/ibm-bob.svg | 6 + docs/architecture.md | 6 +- docs/providers/README.md | 3 +- docs/providers/ibm-bob.md | 55 ++++++ docs/providers/vscode-cline-parser.md | 25 +-- mac/Sources/CodeBurnMenubar/AppStore.swift | 3 + .../CodeBurnMenubar/Views/AgentTabStrip.swift | 1 + package.json | 1 + src/dashboard.tsx | 2 + src/models.ts | 2 + src/parser.ts | 26 +-- src/providers/ibm-bob.ts | 59 +++++++ src/providers/index.ts | 3 +- src/providers/types.ts | 2 + src/providers/vscode-cline-parser.ts | 57 ++++-- tests/provider-registry.test.ts | 2 +- tests/providers/ibm-bob.test.ts | 164 ++++++++++++++++++ 19 files changed, 395 insertions(+), 40 deletions(-) create mode 100644 assets/providers/ibm-bob.svg create mode 100644 docs/providers/ibm-bob.md create mode 100644 src/providers/ibm-bob.ts create mode 100644 tests/providers/ibm-bob.test.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index e7dd43d7..b6d3191d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## Unreleased + +### Added (CLI) +- **IBM Bob provider.** CodeBurn now discovers IBM Bob IDE task history from + 
`User/globalStorage/ibm.bob-code/tasks//` under both the GA + `IBM Bob` application data folder and preview-era `Bob-IDE` folder. The + provider reuses the Cline-family `ui_messages.json` parser for token/cost + records, reads `api_conversation_history.json` for model tags when present, + falls back to `ibm-bob-auto` pricing otherwise, and appears in CLI, + dashboard, JSON, docs, and the macOS provider tabs. Closes #248. + ## 0.9.8 - 2026-05-10 ### Added (CLI) diff --git a/README.md b/README.md index b3700224..9db2a1f8 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Sponsor

-CodeBurn tracks token usage, cost, and performance across **18 AI coding tools**. It breaks down spending by task type, model, tool, project, and provider so you can see exactly where your budget goes. +CodeBurn tracks token usage, cost, and performance across **19 AI coding tools**. It breaks down spending by task type, model, tool, project, and provider so you can see exactly where your budget goes. Everything runs locally. No wrapper, no proxy, no API keys. CodeBurn reads session data directly from disk and prices every call using [LiteLLM](https://github.com/BerriAI/litellm). @@ -104,6 +104,7 @@ Arrow keys switch between Today, 7 Days, 30 Days, Month, and 6 Months (use `--fr | | cursor-agent | Yes | [cursor-agent.md](docs/providers/cursor-agent.md) | | | Gemini CLI | Yes | [gemini.md](docs/providers/gemini.md) | | | GitHub Copilot | Yes | [copilot.md](docs/providers/copilot.md) | +| | IBM Bob | Yes | [ibm-bob.md](docs/providers/ibm-bob.md) | | | Kiro | Yes | [kiro.md](docs/providers/kiro.md) | | | OpenCode | Yes | [opencode.md](docs/providers/opencode.md) | | | OpenClaw | Yes | [openclaw.md](docs/providers/openclaw.md) | @@ -119,7 +120,7 @@ Arrow keys switch between Today, 7 Days, 30 Days, Month, and 6 Months (use `--fr Each provider doc lists the exact data location, storage format, and known quirks. Linux and Windows paths are detected automatically. If a path has changed or is wrong, please [open an issue](https://github.com/getagentseal/codeburn/issues). -Provider logos are trademarks of their respective owners. The icon set was sourced from [tokscale](https://github.com/junhoyeo/tokscale) (MIT) plus official vendor assets, used under nominative fair use for the purpose of identifying supported tools. +Provider logos are trademarks of their respective owners. 
The icon set was sourced from [tokscale](https://github.com/junhoyeo/tokscale) (MIT), official vendor assets, and simple provider identifiers, used under nominative fair use for the purpose of identifying supported tools. CodeBurn auto-detects which AI coding tools you use. If multiple providers have session data on disk, press `p` in the dashboard to toggle between them. @@ -378,6 +379,8 @@ These are starting points, not verdicts. A 60% cache hit on a single experimenta **OpenClaw** stores agent sessions as JSONL at `~/.openclaw/agents/*.jsonl`. Also checks legacy paths `.clawdbot`, `.moltbot`, `.moldbot`. Token usage comes from assistant message `usage` blocks; model from `modelId` or `message.model` fields. +**IBM Bob** stores IDE task history in `User/globalStorage/ibm.bob-code/tasks//` under the IBM Bob application data directory. CodeBurn reads `ui_messages.json` for API request token/cost records and `api_conversation_history.json` for the selected model, with support for both GA (`IBM Bob`) and preview (`Bob-IDE`) app data folders. + **Roo Code / KiloCode** are Cline-family VS Code extensions. CodeBurn reads `ui_messages.json` from each task directory in VS Code's `globalStorage`, filtering `type: "say"` entries with `say: "api_req_started"` to extract token counts. CodeBurn deduplicates messages (by API message ID for Claude, by cumulative token cross-check for Codex, by conversation/timestamp for Cursor, by session ID for Gemini, by session+message ID for OpenCode, by responseId for Pi/OMP), filters by date range per entry, and classifies each turn. 
diff --git a/assets/providers/ibm-bob.svg b/assets/providers/ibm-bob.svg new file mode 100644 index 00000000..ab76047e --- /dev/null +++ b/assets/providers/ibm-bob.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/docs/architecture.md b/docs/architecture.md index 9b1ea14f..c3a8c253 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -128,14 +128,14 @@ type Provider = { } ``` -`src/providers/index.ts` registers eighteen providers across two tiers: +`src/providers/index.ts` registers nineteen providers across two tiers: -- **Eager**: `claude`, `codex`, `copilot`, `droid`, `gemini`, `kilo-code`, `kiro`, `openclaw`, `pi`, `omp`, `qwen`, `roo-code`. Imported at module load. +- **Eager**: `claude`, `codex`, `copilot`, `droid`, `gemini`, `ibm-bob`, `kilo-code`, `kiro`, `openclaw`, `pi`, `omp`, `qwen`, `roo-code`. Imported at module load. - **Lazy**: `antigravity`, `goose`, `cursor`, `opencode`, `cursor-agent`, `crush`. Imported via dynamic `import()` so the heavy dependencies (SQLite, protobuf) do not touch users who do not have those tools installed. Both lists hit the same `getAllProviders()` aggregator. A failed lazy import is silent and excludes that provider from the run. -`src/providers/vscode-cline-parser.ts` is a shared helper consumed by `kilo-code` and `roo-code`. It is not registered as a provider on its own. +`src/providers/vscode-cline-parser.ts` is a shared helper consumed by `ibm-bob`, `kilo-code`, and `roo-code`. It is not registered as a provider on its own. For the per-provider data location, storage format, parser quirks, and test coverage, see `docs/providers/`. diff --git a/docs/providers/README.md b/docs/providers/README.md index 05f43dbf..600bd60b 100644 --- a/docs/providers/README.md +++ b/docs/providers/README.md @@ -15,6 +15,7 @@ For the architectural picture, see `../architecture.md`. 
| [Copilot](copilot.md) | JSONL | `src/providers/copilot.ts` | `tests/providers/copilot.test.ts` | | [Droid](droid.md) | JSONL | `src/providers/droid.ts` | `tests/providers/droid.test.ts` | | [Gemini](gemini.md) | JSON / JSONL | `src/providers/gemini.ts` | none | +| [IBM Bob](ibm-bob.md) | JSON | `src/providers/ibm-bob.ts` | `tests/providers/ibm-bob.test.ts` | | [KiloCode](kilo-code.md) | JSON | `src/providers/kilo-code.ts` | `tests/providers/kilo-code.test.ts` | | [Kiro](kiro.md) | JSON | `src/providers/kiro.ts` | `tests/providers/kiro.test.ts` | | [OpenClaw](openclaw.md) | JSONL | `src/providers/openclaw.ts` | `tests/providers/openclaw.test.ts` | @@ -38,7 +39,7 @@ For the architectural picture, see `../architecture.md`. | Helper | Used by | Source | |---|---|---| -| [vscode-cline-parser](vscode-cline-parser.md) | `kilo-code`, `roo-code` | `src/providers/vscode-cline-parser.ts` | +| [vscode-cline-parser](vscode-cline-parser.md) | `ibm-bob`, `kilo-code`, `roo-code` | `src/providers/vscode-cline-parser.ts` | ## File Format diff --git a/docs/providers/ibm-bob.md b/docs/providers/ibm-bob.md new file mode 100644 index 00000000..c9d43737 --- /dev/null +++ b/docs/providers/ibm-bob.md @@ -0,0 +1,55 @@ +# IBM Bob + +IBM Bob IDE task history. + +- **Source:** `src/providers/ibm-bob.ts` +- **Loading:** eager (`src/providers/index.ts`) +- **Test:** `tests/providers/ibm-bob.test.ts` + +## Where It Reads From + +IBM Bob stores IDE task history below `User/globalStorage/ibm.bob-code/tasks/` in the application data directory. 
+ +Default paths checked: + +| Platform | Paths | +|---|---| +| macOS | `~/Library/Application Support/IBM Bob/User/globalStorage/ibm.bob-code/`, `~/Library/Application Support/Bob-IDE/User/globalStorage/ibm.bob-code/` | +| Windows | `%APPDATA%/IBM Bob/User/globalStorage/ibm.bob-code/`, `%APPDATA%/Bob-IDE/User/globalStorage/ibm.bob-code/` | +| Linux | `$XDG_CONFIG_HOME/IBM Bob/User/globalStorage/ibm.bob-code/`, `$XDG_CONFIG_HOME/Bob-IDE/User/globalStorage/ibm.bob-code/` with `~/.config` fallback | + +The `Bob-IDE` paths cover the preview-era app name that some installs used before the GA `IBM Bob` directory. + +## Storage Format + +Each task is a directory under `tasks/<taskId>/` and must contain `ui_messages.json`. + +CodeBurn parses the same Cline-family UI event format used by Roo Code and KiloCode: + +- `ui_messages.json` entries with `type: "say"` and `say: "api_req_started"` contain serialized token/cost metrics. +- `ui_messages.json` user text entries seed the turn's first user message. +- `api_conversation_history.json` is optional and is used to extract the selected model from `<model>...</model>` environment details when present. +- `task_metadata.json` may exist upstream, but CodeBurn does not need it for usage math today. + +If no model tag is present, the parser uses `ibm-bob-auto`, which is priced through the same conservative Sonnet fallback used for Cline-family auto modes. + +## Caching + +None at the provider level. + +## Deduplication + +Per `<provider>:<taskId>:<index>` via `vscode-cline-parser.ts`. + +## Quirks + +- IBM Bob has shipped under both `IBM Bob` and `Bob-IDE` application data folder names. +- This provider intentionally covers the IDE task-history format. Bob Shell's `~/.bob` checkpoint data is a separate storage surface and is not parsed until we have a stable usage schema fixture. +- The shared Cline parser does not currently extract individual tool names from UI messages, so tool breakdowns are empty for IBM Bob just like Roo Code and KiloCode. 
+ +## When Fixing A Bug Here + +1. Check whether the install uses `IBM Bob` or `Bob-IDE` as the application data directory. +2. Confirm the task folder still contains `ui_messages.json` and `api_conversation_history.json`. +3. If the UI message schema changed, add a focused fixture to `tests/providers/ibm-bob.test.ts`. +4. If the change also affects Roo Code or KiloCode, update `src/providers/vscode-cline-parser.ts` and run all three provider test files. diff --git a/docs/providers/vscode-cline-parser.md b/docs/providers/vscode-cline-parser.md index 5b6bdfa9..ea68eae7 100644 --- a/docs/providers/vscode-cline-parser.md +++ b/docs/providers/vscode-cline-parser.md @@ -1,17 +1,18 @@ # vscode-cline-parser (Shared Helper) -Shared discovery and parsing for VS Code extensions descended from Cline. +Shared discovery and parsing for Cline-family task folders. - **Source:** `src/providers/vscode-cline-parser.ts` -- **Loading:** not a provider; imported by `kilo-code.ts` and `roo-code.ts`. -- **Test:** none directly. Coverage comes from `tests/providers/kilo-code.test.ts` and `tests/providers/roo-code.test.ts`. +- **Loading:** not a provider; imported by `ibm-bob.ts`, `kilo-code.ts`, and `roo-code.ts`. +- **Test:** none directly. Coverage comes from `tests/providers/ibm-bob.test.ts`, `tests/providers/kilo-code.test.ts`, and `tests/providers/roo-code.test.ts`. ## What it does Two responsibilities: -1. `discoverClineTasks(extensionId)` walks VS Code's `globalStorage//tasks/` directories and returns one source per task that has a `ui_messages.json` file (`vscode-cline-parser.ts:25-50`). -2. `createClineParser` reads each task's `ui_messages.json` and `api_conversation_history.json`, extracts model, tools, and token counts, and yields `ParsedProviderCall` objects. +1. `discoverClineTasks(extensionId)` walks VS Code's `globalStorage//tasks/` directories and returns one source per task that has a `ui_messages.json` file. +2. 
`discoverClineTasksInBaseDirs(baseDirs)` does the same for non-VS Code apps with compatible task storage, such as IBM Bob. +3. `createClineParser` reads each task's `ui_messages.json` and `api_conversation_history.json`, extracts model and token counts, and yields `ParsedProviderCall` objects. ## Storage layout @@ -25,25 +26,25 @@ Per task directory: ## Model resolution -The model is extracted from `api_conversation_history.json` by searching user message content blocks for a `...` tag (`vscode-cline-parser.ts:54-72`). Falls back to `cline-auto` if no tag is found. +The model is extracted from `api_conversation_history.json` by searching user message content blocks for a `...` tag. Falls back to the provider-supplied auto model (`cline-auto` by default) if no tag is found. ## Token extraction -From `api_req_started` entries inside `ui_messages.json`. Each such entry's `text` field is JSON-parsed; the parsed object holds `tokensIn`, `tokensOut`, `cacheReads`, `cacheWrites`, and (optionally) `cost` (`vscode-cline-parser.ts:119-134`). +From `api_req_started` entries inside `ui_messages.json`. Each such entry's `text` field is JSON-parsed; the parsed object holds `tokensIn`, `tokensOut`, `cacheReads`, `cacheWrites`, and (optionally) `cost`. -If `cost` is present, it is used directly. If not, `calculateCost` from `src/models.ts` computes it from tokens (`vscode-cline-parser.ts:139`). +If `cost` is present, it is used directly. If not, `calculateCost` from `src/models.ts` computes it from tokens. ## Deduplication -Per `::` where `index` is the position of the `api_req_started` entry within `ui_messages.json` (`vscode-cline-parser.ts:109`). +Per `::` where `index` is the position of the `api_req_started` entry within `ui_messages.json`. ## Quirks -- Only the **first** user message is emitted as `userMessage` in the `ParsedProviderCall` (`vscode-cline-parser.ts:157`). Subsequent user turns are accounted but not surfaced. 
+- Only the **first** user message is emitted as `userMessage` in the `ParsedProviderCall`. Subsequent user turns are accounted but not surfaced. - The model regex looks inside content blocks, not at top-level fields. Some Cline-derivative extensions emit the model elsewhere; if you add support for one, branch on extension ID rather than rewriting the regex. ## When fixing a bug here -1. A change here ripples to **both** KiloCode and Roo Code. Run both test files (`tests/providers/kilo-code.test.ts` and `tests/providers/roo-code.test.ts`) before opening a PR. +1. A change here ripples to IBM Bob, KiloCode, and Roo Code. Run all three provider test files before opening a PR. -2. If you find that one of the two extensions emits a different shape, branch on the extension ID parameter that the discovery function already takes; do not duplicate the parser. +2. If you find that one of the consumers emits a different shape, branch on the provider identity the discovery and parser functions already receive; do not duplicate the parser. -3. If you add support for a third Cline-derivative extension, register it as a thin wrapper file in the same shape as `kilo-code.ts` and `roo-code.ts`. +3. If you add support for another Cline-family task store, register it as a thin wrapper file in the same shape as `ibm-bob.ts`, `kilo-code.ts`, and `roo-code.ts`. 
diff --git a/mac/Sources/CodeBurnMenubar/AppStore.swift b/mac/Sources/CodeBurnMenubar/AppStore.swift index 38d370a9..ec5fdfa9 100644 --- a/mac/Sources/CodeBurnMenubar/AppStore.swift +++ b/mac/Sources/CodeBurnMenubar/AppStore.swift @@ -736,6 +736,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable { case copilot = "Copilot" case droid = "Droid" case gemini = "Gemini" + case ibmBob = "IBM Bob" case kiro = "Kiro" case kiloCode = "KiloCode" case openclaw = "OpenClaw" @@ -753,6 +754,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable { case .cursor: ["cursor", "cursor agent"] case .rooCode: ["roo-code", "roo code"] case .kiloCode: ["kilo-code", "kilocode"] + case .ibmBob: ["ibm-bob", "ibm bob"] case .openclaw: ["openclaw"] default: [rawValue.lowercased()] } @@ -767,6 +769,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable { case .copilot: "copilot" case .droid: "droid" case .gemini: "gemini" + case .ibmBob: "ibm-bob" case .kiloCode: "kilo-code" case .kiro: "kiro" case .openclaw: "openclaw" diff --git a/mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift b/mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift index 6561cc97..df47c460 100644 --- a/mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift +++ b/mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift @@ -345,6 +345,7 @@ extension ProviderFilter { case .copilot: return Color(red: 0x6D/255.0, green: 0x8F/255.0, blue: 0xA6/255.0) case .droid: return Color(red: 0x7C/255.0, green: 0x3A/255.0, blue: 0xED/255.0) case .gemini: return Color(red: 0x44/255.0, green: 0x85/255.0, blue: 0xF4/255.0) + case .ibmBob: return Color(red: 0x0F/255.0, green: 0x62/255.0, blue: 0xFE/255.0) case .kiloCode: return Color(red: 0x00/255.0, green: 0x96/255.0, blue: 0x88/255.0) case .kiro: return Color(red: 0x4A/255.0, green: 0x9E/255.0, blue: 0xC4/255.0) case .openclaw: return Color(red: 0xDA/255.0, green: 0x70/255.0, blue: 0x56/255.0) diff --git a/package.json b/package.json index a58098db..b831b30c 100644 --- 
a/package.json +++ b/package.json @@ -21,6 +21,7 @@ "claude-code", "cursor", "codex", + "ibm-bob", "opencode", "pi", "ai-coding", diff --git a/src/dashboard.tsx b/src/dashboard.tsx index b46dbcce..e666b183 100644 --- a/src/dashboard.tsx +++ b/src/dashboard.tsx @@ -52,6 +52,7 @@ const PROVIDER_COLORS: Record = { claude: '#FF8C42', codex: '#5BF5A0', cursor: '#00B4D8', + 'ibm-bob': '#0F62FE', opencode: '#A78BFA', pi: '#F472B6', all: '#FF8C42', @@ -513,6 +514,7 @@ const PROVIDER_DISPLAY_NAMES: Record = { claude: 'Claude', codex: 'Codex', cursor: 'Cursor', + 'ibm-bob': 'IBM Bob', opencode: 'OpenCode', pi: 'Pi', } diff --git a/src/models.ts b/src/models.ts index e4441e0a..0d43793f 100644 --- a/src/models.ts +++ b/src/models.ts @@ -166,6 +166,7 @@ const BUILTIN_ALIASES: Record = { 'copilot-auto': 'claude-sonnet-4-5', 'copilot-openai-auto': 'gpt-5.3-codex', 'copilot-anthropic-auto': 'claude-sonnet-4-5', + 'ibm-bob-auto': 'claude-sonnet-4-5', 'kiro-auto': 'claude-sonnet-4-5', 'cline-auto': 'claude-sonnet-4-5', 'openclaw-auto': 'claude-sonnet-4-5', @@ -351,6 +352,7 @@ const autoModelNames: Record = { 'copilot-auto': 'Copilot (auto)', 'copilot-openai-auto': 'Copilot (OpenAI)', 'copilot-anthropic-auto': 'Copilot (Anthropic)', + 'ibm-bob-auto': 'IBM Bob (auto)', 'kiro-auto': 'Kiro (auto)', 'cline-auto': 'Cline (auto)', 'openclaw-auto': 'OpenClaw (auto)', diff --git a/src/parser.ts b/src/parser.ts index 50fa648d..d49697b3 100644 --- a/src/parser.ts +++ b/src/parser.ts @@ -550,7 +550,7 @@ async function parseProviderSources( const provider = await getProvider(providerName) if (!provider) return [] - const sessionMap = new Map() + const sessionMap = new Map() try { for (const source of sources) { @@ -574,13 +574,15 @@ async function parseProviderSources( const turn = providerCallToTurn(call) const classified = classifyTurn(turn) - const key = `${providerName}:${call.sessionId}:${source.project}` + const project = call.project ?? 
source.project + const key = `${providerName}:${call.sessionId}:${project}` const existing = sessionMap.get(key) if (existing) { existing.turns.push(classified) + if (!existing.projectPath && call.projectPath) existing.projectPath = call.projectPath } else { - sessionMap.set(key, { project: source.project, turns: [classified] }) + sessionMap.set(key, { project, projectPath: call.projectPath, turns: [classified] }) } } } @@ -592,22 +594,26 @@ async function parseProviderSources( } } - const projectMap = new Map() - for (const [key, { project, turns }] of sessionMap) { + const projectMap = new Map() + for (const [key, { project, projectPath, turns }] of sessionMap) { const sessionId = key.split(':')[1] ?? key const session = buildSessionSummary(sessionId, project, turns) if (session.apiCalls > 0) { - const existing = projectMap.get(project) ?? [] - existing.push(session) - projectMap.set(project, existing) + const existing = projectMap.get(project) + if (existing) { + existing.sessions.push(session) + if (!existing.projectPath && projectPath) existing.projectPath = projectPath + } else { + projectMap.set(project, { projectPath, sessions: [session] }) + } } } const projects: ProjectSummary[] = [] - for (const [dirName, sessions] of projectMap) { + for (const [dirName, { projectPath, sessions }] of projectMap) { projects.push({ project: dirName, - projectPath: unsanitizePath(dirName), + projectPath: projectPath ?? 
unsanitizePath(dirName), sessions, totalCostUSD: sessions.reduce((s, sess) => s + sess.totalCostUSD, 0), totalApiCalls: sessions.reduce((s, sess) => s + sess.apiCalls, 0), diff --git a/src/providers/ibm-bob.ts b/src/providers/ibm-bob.ts new file mode 100644 index 00000000..5aec0f65 --- /dev/null +++ b/src/providers/ibm-bob.ts @@ -0,0 +1,59 @@ +import { join } from 'path' +import { homedir } from 'os' + +import { getShortModelName } from '../models.js' +import { discoverClineTasksInBaseDirs, createClineParser } from './vscode-cline-parser.js' +import type { Provider, SessionSource, SessionParser } from './types.js' + +const PROVIDER_NAME = 'ibm-bob' +const DISPLAY_NAME = 'IBM Bob' +const EXTENSION_ID = 'ibm.bob-code' +const FALLBACK_MODEL = 'ibm-bob-auto' + +export function getIBMBobGlobalStorageDirs(): string[] { + const home = homedir() + if (process.platform === 'darwin') { + return [ + join(home, 'Library', 'Application Support', 'IBM Bob', 'User', 'globalStorage', EXTENSION_ID), + join(home, 'Library', 'Application Support', 'Bob-IDE', 'User', 'globalStorage', EXTENSION_ID), + ] + } + if (process.platform === 'win32') { + const appData = process.env['APPDATA'] ?? join(home, 'AppData', 'Roaming') + return [ + join(appData, 'IBM Bob', 'User', 'globalStorage', EXTENSION_ID), + join(appData, 'Bob-IDE', 'User', 'globalStorage', EXTENSION_ID), + ] + } + const configHome = process.env['XDG_CONFIG_HOME'] ?? join(home, '.config') + return [ + join(configHome, 'IBM Bob', 'User', 'globalStorage', EXTENSION_ID), + join(configHome, 'Bob-IDE', 'User', 'globalStorage', EXTENSION_ID), + ] +} + +export function createIBMBobProvider(overrideDir?: string): Provider { + return { + name: PROVIDER_NAME, + displayName: DISPLAY_NAME, + + modelDisplayName(model: string): string { + return getShortModelName(model) + }, + + toolDisplayName(rawTool: string): string { + return rawTool + }, + + async discoverSessions(): Promise { + const dirs = overrideDir ? 
[overrideDir] : getIBMBobGlobalStorageDirs() + return discoverClineTasksInBaseDirs(dirs, PROVIDER_NAME, DISPLAY_NAME) + }, + + createSessionParser(source: SessionSource, seenKeys: Set): SessionParser { + return createClineParser(source, seenKeys, PROVIDER_NAME, FALLBACK_MODEL) + }, + } +} + +export const ibmBob = createIBMBobProvider() diff --git a/src/providers/index.ts b/src/providers/index.ts index 38ed4902..551d3a29 100644 --- a/src/providers/index.ts +++ b/src/providers/index.ts @@ -3,6 +3,7 @@ import { codex } from './codex.js' import { copilot } from './copilot.js' import { droid } from './droid.js' import { gemini } from './gemini.js' +import { ibmBob } from './ibm-bob.js' import { kiloCode } from './kilo-code.js' import { kiro } from './kiro.js' import { openclaw } from './openclaw.js' @@ -101,7 +102,7 @@ async function loadCrush(): Promise { } } -const coreProviders: Provider[] = [claude, codex, copilot, droid, gemini, kiloCode, kiro, openclaw, pi, omp, qwen, rooCode] +const coreProviders: Provider[] = [claude, codex, copilot, droid, gemini, ibmBob, kiloCode, kiro, openclaw, pi, omp, qwen, rooCode] export async function getAllProviders(): Promise { const [ag, gs, cursor, opencode, cursorAgent, crush] = await Promise.all([loadAntigravity(), loadGoose(), loadCursor(), loadOpenCode(), loadCursorAgent(), loadCrush()]) diff --git a/src/providers/types.ts b/src/providers/types.ts index 4e9a98a7..90d5e1c2 100644 --- a/src/providers/types.ts +++ b/src/providers/types.ts @@ -27,6 +27,8 @@ export type ParsedProviderCall = { deduplicationKey: string userMessage: string sessionId: string + project?: string + projectPath?: string } export type Provider = { diff --git a/src/providers/vscode-cline-parser.ts b/src/providers/vscode-cline-parser.ts index d1d26c0f..ffad9390 100644 --- a/src/providers/vscode-cline-parser.ts +++ b/src/providers/vscode-cline-parser.ts @@ -24,6 +24,23 @@ export function getVSCodeGlobalStoragePath(extensionId: string): string { export async 
function discoverClineTasks(extensionId: string, providerName: string, displayName: string, overrideDir?: string): Promise { const baseDir = overrideDir ?? getVSCodeGlobalStoragePath(extensionId) + return discoverClineTasksInBaseDirs([baseDir], providerName, displayName) +} + +export async function discoverClineTasksInBaseDirs(baseDirs: string[], providerName: string, displayName: string): Promise { + const sources: SessionSource[] = [] + const seen = new Set() + for (const baseDir of baseDirs) { + for (const source of await discoverClineTasksInBaseDir(baseDir, providerName, displayName)) { + if (seen.has(source.path)) continue + seen.add(source.path) + sources.push(source) + } + } + return sources +} + +async function discoverClineTasksInBaseDir(baseDir: string, providerName: string, displayName: string): Promise { const tasksDir = join(baseDir, 'tasks') const sources: SessionSource[] = [] @@ -50,28 +67,43 @@ export async function discoverClineTasks(extensionId: string, providerName: stri } const MODEL_TAG_RE = /([^<]+)<\/model>/ +const WORKSPACE_DIR_RE = /Current Workspace Directory \(([^)]+)\)/ + +type HistoryMeta = { model: string; workspace: string | null } -function extractModelFromHistory(taskDir: string): Promise { +function extractHistoryMeta(taskDir: string, fallbackModel: string): Promise { return readFile(join(taskDir, 'api_conversation_history.json'), 'utf-8') .then(raw => { const msgs = JSON.parse(raw) as Array<{ role?: string; content?: Array<{ text?: string }> }> - if (!Array.isArray(msgs)) return 'cline-auto' + if (!Array.isArray(msgs)) return { model: fallbackModel, workspace: null } + let model: string | null = null + let workspace: string | null = null for (const msg of msgs) { if (msg.role !== 'user' || !Array.isArray(msg.content)) continue for (const block of msg.content) { - const match = typeof block.text === 'string' && MODEL_TAG_RE.exec(block.text) - if (match) { - const raw = match[1] - return raw.includes('/') ? raw.split('/').pop()! 
: raw + if (typeof block.text !== 'string') continue + if (!model) { + const mm = MODEL_TAG_RE.exec(block.text) + if (mm) model = mm[1].includes('/') ? mm[1].split('/').pop()! : mm[1] + } + if (!workspace) { + const wm = WORKSPACE_DIR_RE.exec(block.text) + if (wm) workspace = wm[1] } + if (model && workspace) break } + if (model && workspace) break } - return 'cline-auto' + return { model: model ?? fallbackModel, workspace } }) - .catch(() => 'cline-auto') + .catch(() => ({ model: fallbackModel, workspace: null })) +} + +function workspaceToProject(workspace: string): string { + return basename(workspace) || workspace } -export function createClineParser(source: SessionSource, seenKeys: Set, providerName: string): SessionParser { +export function createClineParser(source: SessionSource, seenKeys: Set, providerName: string, fallbackModel = 'cline-auto'): SessionParser { return { async *parse(): AsyncGenerator { const taskDir = source.path @@ -93,7 +125,10 @@ export function createClineParser(source: SessionSource, seenKeys: Set, if (!Array.isArray(uiMessages)) return - const model = await extractModelFromHistory(taskDir) + const meta = await extractHistoryMeta(taskDir, fallbackModel) + const model = meta.model + const project = meta.workspace ? workspaceToProject(meta.workspace) : undefined + const projectPath = meta.workspace ?? undefined let userMessage = '' for (const msg of uiMessages) { @@ -156,6 +191,8 @@ export function createClineParser(source: SessionSource, seenKeys: Set, deduplicationKey: dedupKey, userMessage: index === 0 ? 
userMessage : '', sessionId: taskId, + project, + projectPath, } } }, diff --git a/tests/provider-registry.test.ts b/tests/provider-registry.test.ts index 4497946f..2dc1dfc6 100644 --- a/tests/provider-registry.test.ts +++ b/tests/provider-registry.test.ts @@ -3,7 +3,7 @@ import { providers, getAllProviders } from '../src/providers/index.js' describe('provider registry', () => { it('has core providers registered synchronously', () => { - expect(providers.map(p => p.name)).toEqual(['claude', 'codex', 'copilot', 'droid', 'gemini', 'kilo-code', 'kiro', 'openclaw', 'pi', 'omp', 'qwen', 'roo-code']) + expect(providers.map(p => p.name)).toEqual(['claude', 'codex', 'copilot', 'droid', 'gemini', 'ibm-bob', 'kilo-code', 'kiro', 'openclaw', 'pi', 'omp', 'qwen', 'roo-code']) }) it('includes sqlite providers after async load', async () => { diff --git a/tests/providers/ibm-bob.test.ts b/tests/providers/ibm-bob.test.ts new file mode 100644 index 00000000..d61f92e3 --- /dev/null +++ b/tests/providers/ibm-bob.test.ts @@ -0,0 +1,164 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest' +import { mkdtemp, mkdir, writeFile, rm } from 'fs/promises' +import { join } from 'path' +import { tmpdir } from 'os' + +import { ibmBob, createIBMBobProvider } from '../../src/providers/ibm-bob.js' +import type { ParsedProviderCall } from '../../src/providers/types.js' + +let tmpDir: string + +function makeUiMessages(opts: { + tokensIn?: number + tokensOut?: number + cacheReads?: number + cacheWrites?: number + cost?: number + userMessage?: string + ts?: number +}): string { + const messages: unknown[] = [] + + if (opts.userMessage) { + messages.push({ type: 'say', say: 'user_feedback', text: opts.userMessage, ts: 1_700_000_000_000 }) + } + + const apiData: Record = { + tokensIn: opts.tokensIn ?? 100, + tokensOut: opts.tokensOut ?? 50, + cacheReads: opts.cacheReads ?? 0, + cacheWrites: opts.cacheWrites ?? 
0, + } + if (opts.cost !== undefined) apiData.cost = opts.cost + + messages.push({ + type: 'say', + say: 'api_req_started', + text: JSON.stringify(apiData), + ts: opts.ts ?? 1_700_000_001_000, + }) + + return JSON.stringify(messages) +} + +function makeApiHistory(model?: string): string { + const modelTag = model ? `${model}` : '' + return JSON.stringify([ + { role: 'user', content: [{ type: 'text', text: `hello\n\n${modelTag}\n` }] }, + { role: 'assistant', content: [{ type: 'text', text: 'response' }] }, + ]) +} + +describe('ibm-bob provider - discovery and parsing', () => { + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'ibm-bob-test-')) + }) + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }) + }) + + it('discovers IBM Bob task directories with ui_messages.json', async () => { + const task1 = join(tmpDir, 'tasks', 'task-a') + const task2 = join(tmpDir, 'tasks', 'task-b') + await mkdir(task1, { recursive: true }) + await mkdir(task2, { recursive: true }) + await writeFile(join(task1, 'ui_messages.json'), '[]') + await writeFile(join(task2, 'ui_messages.json'), '[]') + + const provider = createIBMBobProvider(tmpDir) + const sessions = await provider.discoverSessions() + + expect(sessions).toHaveLength(2) + expect(sessions.every(s => s.provider === 'ibm-bob')).toBe(true) + expect(sessions.every(s => s.project === 'IBM Bob')).toBe(true) + }) + + it('skips tasks without ui_messages.json', async () => { + const task = join(tmpDir, 'tasks', 'task-no-ui') + await mkdir(task, { recursive: true }) + await writeFile(join(task, 'api_conversation_history.json'), '[]') + + const provider = createIBMBobProvider(tmpDir) + const sessions = await provider.discoverSessions() + + expect(sessions).toHaveLength(0) + }) + + it('parses token usage and provider cost from Bob ui messages', async () => { + const taskDir = join(tmpDir, 'tasks', 'task-001') + await mkdir(taskDir, { recursive: true }) + await writeFile(join(taskDir, 
'ui_messages.json'), makeUiMessages({ + tokensIn: 250, + tokensOut: 125, + cacheReads: 60, + cacheWrites: 30, + cost: 0.08, + userMessage: 'modernize this class', + })) + await writeFile(join(taskDir, 'api_conversation_history.json'), makeApiHistory('anthropic/claude-sonnet-4-6')) + + const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' } + const calls: ParsedProviderCall[] = [] + for await (const call of ibmBob.createSessionParser(source, new Set()).parse()) calls.push(call) + + expect(calls).toHaveLength(1) + expect(calls[0]!).toMatchObject({ + provider: 'ibm-bob', + model: 'claude-sonnet-4-6', + inputTokens: 250, + outputTokens: 125, + cacheReadInputTokens: 60, + cacheCreationInputTokens: 30, + costUSD: 0.08, + userMessage: 'modernize this class', + sessionId: 'task-001', + }) + expect(calls[0]!.deduplicationKey).toBe('ibm-bob:task-001:0') + }) + + it('falls back to IBM Bob auto model when history has no model tag', async () => { + const taskDir = join(tmpDir, 'tasks', 'task-002') + await mkdir(taskDir, { recursive: true }) + await writeFile(join(taskDir, 'ui_messages.json'), makeUiMessages({ tokensIn: 100, tokensOut: 50 })) + await writeFile(join(taskDir, 'api_conversation_history.json'), makeApiHistory()) + + const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' } + const calls: ParsedProviderCall[] = [] + for await (const call of ibmBob.createSessionParser(source, new Set()).parse()) calls.push(call) + + expect(calls).toHaveLength(1) + expect(calls[0]!.model).toBe('ibm-bob-auto') + expect(calls[0]!.costUSD).toBeGreaterThan(0) + }) + + it('deduplicates across parser runs', async () => { + const taskDir = join(tmpDir, 'tasks', 'task-003') + await mkdir(taskDir, { recursive: true }) + await writeFile(join(taskDir, 'ui_messages.json'), makeUiMessages({ tokensIn: 100, tokensOut: 50 })) + + const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' } + const seenKeys = new Set() + + const calls1: 
ParsedProviderCall[] = [] + for await (const call of ibmBob.createSessionParser(source, seenKeys).parse()) calls1.push(call) + + const calls2: ParsedProviderCall[] = [] + for await (const call of ibmBob.createSessionParser(source, seenKeys).parse()) calls2.push(call) + + expect(calls1).toHaveLength(1) + expect(calls2).toHaveLength(0) + }) +}) + +describe('ibm-bob provider - metadata', () => { + it('has correct name and displayName', () => { + expect(ibmBob.name).toBe('ibm-bob') + expect(ibmBob.displayName).toBe('IBM Bob') + }) + + it('uses shared short model display names', () => { + expect(ibmBob.modelDisplayName('ibm-bob-auto')).toBe('IBM Bob (auto)') + expect(ibmBob.modelDisplayName('claude-sonnet-4-6')).toBe('Sonnet 4.6') + }) +}) From c85beeaeaeaeec92671ddde6a0d1a385d5ff1d32 Mon Sep 17 00:00:00 2001 From: AgentSeal Date: Mon, 11 May 2026 21:23:04 -0700 Subject: [PATCH 08/17] Fix Claude 1-hour cache write pricing (#317) Co-authored-by: ozymandiashh <234437643+ozymandiashh@users.noreply.github.com> Co-authored-by: iamtoruk --- CHANGELOG.md | 15 ++++---- docs/providers/claude.md | 11 ++++++ src/daily-cache.ts | 25 ++++++------- src/models.ts | 8 ++++- src/parser.ts | 26 +++++++++++++- src/types.ts | 4 +++ tests/daily-cache.test.ts | 30 ++++++++++++++++ tests/models.test.ts | 12 +++++++ tests/parser-claude-cwd.test.ts | 64 +++++++++++++++++++++++++++++---- 9 files changed, 165 insertions(+), 30 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6d3191d..d8c11632 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,13 +3,14 @@ ## Unreleased ### Added (CLI) -- **IBM Bob provider.** CodeBurn now discovers IBM Bob IDE task history from - `User/globalStorage/ibm.bob-code/tasks//` under both the GA - `IBM Bob` application data folder and preview-era `Bob-IDE` folder. 
The - provider reuses the Cline-family `ui_messages.json` parser for token/cost - records, reads `api_conversation_history.json` for model tags when present, - falls back to `ibm-bob-auto` pricing otherwise, and appears in CLI, - dashboard, JSON, docs, and the macOS provider tabs. Closes #248. +- **IBM Bob provider.** Discovers IBM Bob IDE task history, reuses the + Cline-family parser for token/cost records, extracts model tags and + workspace-based project names from session data. Closes #248. + +### Fixed (CLI) +- **Claude 1-hour cache write pricing.** 1-hour cache writes are now priced + at 2x base input (previously used the 5-minute 1.25x rate for all writes). + Daily cache bumped to v6 so stale totals are recomputed. Closes #276. ## 0.9.8 - 2026-05-10 diff --git a/docs/providers/claude.md b/docs/providers/claude.md index b0b7b8cf..b5954c1f 100644 --- a/docs/providers/claude.md +++ b/docs/providers/claude.md @@ -25,6 +25,17 @@ JSONL, one event per line, per session file. Sessions live under `/ = { 'claude-opus-4-7': 6, @@ -311,6 +312,7 @@ export function calculateCost( cacheReadTokens: number, webSearchRequests: number, speed: 'standard' | 'fast' = 'standard', + oneHourCacheCreationTokens = 0, ): number { const costs = getModelCosts(model) if (!costs) { @@ -336,11 +338,15 @@ export function calculateCost( // from real spend in aggregate totals. NaN is also handled here; the // arithmetic below short-circuits to 0 when any operand is non-finite. const safe = (n: number) => (Number.isFinite(n) && n > 0 ? 
n : 0) + const safeOneHourCacheCreation = safe(oneHourCacheCreationTokens) + const safeCacheCreation = Math.max(safe(cacheCreationTokens), safeOneHourCacheCreation) + const safeFiveMinuteCacheCreation = Math.max(0, safeCacheCreation - safeOneHourCacheCreation) return multiplier * ( safe(inputTokens) * costs.inputCostPerToken + safe(outputTokens) * costs.outputCostPerToken + - safe(cacheCreationTokens) * costs.cacheWriteCostPerToken + + safeFiveMinuteCacheCreation * costs.cacheWriteCostPerToken + + safeOneHourCacheCreation * costs.cacheWriteCostPerToken * ONE_HOUR_CACHE_WRITE_MULTIPLIER_FROM_FIVE_MINUTE_RATE + safe(cacheReadTokens) * costs.cacheReadCostPerToken + safe(webSearchRequests) * costs.webSearchCostPerRequest ) diff --git a/src/parser.ts b/src/parser.ts index d49697b3..3bb602e8 100644 --- a/src/parser.ts +++ b/src/parser.ts @@ -92,16 +92,39 @@ function getMessageId(entry: JournalEntry): string | null { return msg?.id ?? null } +function positiveNumber(n: number | undefined): number { + return n !== undefined && Number.isFinite(n) && n > 0 ? n : 0 +} + +function extractClaudeCacheCreation(usage: AssistantMessageContent['usage']): { totalTokens: number; oneHourTokens: number } { + const legacyTotal = positiveNumber(usage.cache_creation_input_tokens) + const cacheCreation = usage.cache_creation + const fiveMinuteTokens = positiveNumber(cacheCreation?.ephemeral_5m_input_tokens) + const oneHourTokens = positiveNumber(cacheCreation?.ephemeral_1h_input_tokens) + const splitTotal = fiveMinuteTokens + oneHourTokens + + if (splitTotal === 0) return { totalTokens: legacyTotal, oneHourTokens: 0 } + + // Valid Claude usage reports the legacy total and split total as equal. + // Keep the larger value so malformed partial splits do not drop tokens. 
+ const totalTokens = Math.max(legacyTotal, splitTotal) + return { + totalTokens, + oneHourTokens: Math.min(oneHourTokens, totalTokens), + } +} + function parseApiCall(entry: JournalEntry): ParsedApiCall | null { if (entry.type !== 'assistant') return null const msg = entry.message as AssistantMessageContent | undefined if (!msg?.usage || !msg?.model) return null const usage = msg.usage + const cacheCreation = extractClaudeCacheCreation(usage) const tokens: TokenUsage = { inputTokens: usage.input_tokens ?? 0, outputTokens: usage.output_tokens ?? 0, - cacheCreationInputTokens: usage.cache_creation_input_tokens ?? 0, + cacheCreationInputTokens: cacheCreation.totalTokens, cacheReadInputTokens: usage.cache_read_input_tokens ?? 0, cachedInputTokens: 0, reasoningTokens: 0, @@ -118,6 +141,7 @@ function parseApiCall(entry: JournalEntry): ParsedApiCall | null { tokens.cacheReadInputTokens, tokens.webSearchRequests, usage.speed ?? 'standard', + cacheCreation.oneHourTokens, ) const bashCmds = extractBashCommandsFromContent(msg.content ?? 
[]) diff --git a/src/types.ts b/src/types.ts index e5562e8f..eecee5c5 100644 --- a/src/types.ts +++ b/src/types.ts @@ -25,6 +25,10 @@ export type ApiUsage = { input_tokens: number output_tokens: number cache_creation_input_tokens?: number + cache_creation?: { + ephemeral_5m_input_tokens?: number + ephemeral_1h_input_tokens?: number + } cache_read_input_tokens?: number server_tool_use?: { web_search_requests?: number diff --git a/tests/daily-cache.test.ts b/tests/daily-cache.test.ts index 5ec26612..2f384ccc 100644 --- a/tests/daily-cache.test.ts +++ b/tests/daily-cache.test.ts @@ -104,6 +104,36 @@ describe('loadDailyCache', () => { expect(existsSync(join(TMP_CACHE_ROOT, 'daily-cache.json.v2.bak'))).toBe(true) }) + it('discards a v5 cache because cached Claude costs predate 1-hour cache pricing', async () => { + const saved = { + version: 5, + lastComputedDate: '2026-05-01', + days: [{ + date: '2026-05-01', + cost: 0.37575, + calls: 1, + sessions: 1, + inputTokens: 0, + outputTokens: 0, + cacheReadTokens: 0, + cacheWriteTokens: 60_120, + editTurns: 0, + oneShotTurns: 0, + models: { 'Opus 4.7': { calls: 1, cost: 0.37575, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 60_120 } }, + categories: {}, + providers: { claude: { calls: 1, cost: 0.37575 } }, + }], + } + const { writeFile, mkdir } = await import('fs/promises') + await mkdir(TMP_CACHE_ROOT, { recursive: true }) + await writeFile(join(TMP_CACHE_ROOT, 'daily-cache.json'), JSON.stringify(saved), 'utf-8') + const cache = await loadDailyCache() + expect(cache.version).toBe(DAILY_CACHE_VERSION) + expect(cache.days).toEqual([]) + expect(cache.lastComputedDate).toBeNull() + expect(existsSync(join(TMP_CACHE_ROOT, 'daily-cache.json.v5.bak'))).toBe(true) + }) + it('round-trips a valid cache through save and load', async () => { const saved: DailyCache = { version: DAILY_CACHE_VERSION, diff --git a/tests/models.test.ts b/tests/models.test.ts index 9fdf87bf..41ccb5e8 100644 --- a/tests/models.test.ts 
+++ b/tests/models.test.ts @@ -158,6 +158,18 @@ describe('calculateCost - OMP names produce non-zero cost', () => { }) }) +describe('calculateCost - Claude cache write durations', () => { + it('prices 1-hour cache writes at 1.6x the 5-minute cache write rate', () => { + const fiveMinute = calculateCost('claude-opus-4-7', 0, 0, 1_000_000, 0, 0) + const oneHour = calculateCost('claude-opus-4-7', 0, 0, 1_000_000, 0, 0, 'standard', 1_000_000) + const mixed = calculateCost('claude-opus-4-7', 0, 0, 100_000, 0, 0, 'standard', 60_000) + + expect(fiveMinute).toBeCloseTo(6.25, 6) + expect(oneHour).toBeCloseTo(10, 6) + expect(mixed).toBeCloseTo(0.85, 6) + }) +}) + describe('existing model names still resolve', () => { it('canonical claude-opus-4-6', () => { expect(getModelCosts('claude-opus-4-6')).not.toBeNull() diff --git a/tests/parser-claude-cwd.test.ts b/tests/parser-claude-cwd.test.ts index 65c96db0..179ad7cb 100644 --- a/tests/parser-claude-cwd.test.ts +++ b/tests/parser-claude-cwd.test.ts @@ -31,7 +31,14 @@ function dayRange(day: string): DateRange { } } -async function writeClaudeSession(projectSlug: string, sessionId: string, cwd: string, timestamp: string): Promise { +async function writeClaudeSession( + projectSlug: string, + sessionId: string, + cwd: string, + timestamp: string, + usage: Record = { input_tokens: 100, output_tokens: 50 }, + model = 'claude-sonnet-4-5', +): Promise { const projectDir = join(tmpDir, 'projects', projectSlug) await mkdir(projectDir, { recursive: true }) const filePath = join(projectDir, `${sessionId}.jsonl`) @@ -44,12 +51,9 @@ async function writeClaudeSession(projectSlug: string, sessionId: string, cwd: s id: `msg-${sessionId}`, type: 'message', role: 'assistant', - model: 'claude-sonnet-4-5', + model, content: [], - usage: { - input_tokens: 100, - output_tokens: 50, - }, + usage, }, }) + '\n') @@ -158,3 +162,51 @@ describe('Claude cwd project paths', () => { expect(projects[0]!.projectPath).toBe('fallback/slug') }) }) + 
+describe('Claude cache creation pricing', () => { + it('prices 1-hour cache writes from usage.cache_creation at the 2x input rate', async () => { + await writeClaudeSession( + 'cache-pricing', + 'one-hour-cache', + '/tmp/cache-pricing', + '2099-05-05T10:00:00.000Z', + { + input_tokens: 0, + output_tokens: 0, + cache_creation_input_tokens: 60_120, + cache_creation: { + ephemeral_5m_input_tokens: 0, + ephemeral_1h_input_tokens: 60_120, + }, + }, + 'claude-opus-4-7', + ) + + const projects = await parseAllSessions(dayRange('2099-05-05'), 'claude') + + expect(projects).toHaveLength(1) + expect(projects[0]!.sessions[0]!.totalCacheWriteTokens).toBe(60_120) + expect(projects[0]!.totalCostUSD).toBeCloseTo(0.6012, 6) + }) + + it('falls back to the legacy 5-minute cache write rate when split fields are absent', async () => { + await writeClaudeSession( + 'legacy-cache-pricing', + 'legacy-cache', + '/tmp/legacy-cache-pricing', + '2099-05-06T10:00:00.000Z', + { + input_tokens: 0, + output_tokens: 0, + cache_creation_input_tokens: 60_120, + }, + 'claude-opus-4-7', + ) + + const projects = await parseAllSessions(dayRange('2099-05-06'), 'claude') + + expect(projects).toHaveLength(1) + expect(projects[0]!.sessions[0]!.totalCacheWriteTokens).toBe(60_120) + expect(projects[0]!.totalCostUSD).toBeCloseTo(0.37575, 6) + }) +}) From a1b5e4bd00012de9b72efc67edabd4c8c0740e62 Mon Sep 17 00:00:00 2001 From: AgentSeal Date: Mon, 11 May 2026 21:30:27 -0700 Subject: [PATCH 09/17] Fix OpenCode MCP usage reporting (#318) * Fix OpenCode MCP usage reporting * Move OpenCode MCP changelog entry to Unreleased section --------- Co-authored-by: ozymandiashh <234437643+ozymandiashh@users.noreply.github.com> Co-authored-by: iamtoruk --- CHANGELOG.md | 4 ++ docs/providers/opencode.md | 14 ++-- src/providers/opencode.ts | 21 +++++- tests/providers/opencode.test.ts | 118 +++++++++++++++++++++++++++++++ 4 files changed, 151 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
d8c11632..8f706161 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,10 @@ - **Claude 1-hour cache write pricing.** 1-hour cache writes are now priced at 2x base input (previously used the 5-minute 1.25x rate for all writes). Daily cache bumped to v6 so stale totals are recomputed. Closes #276. +- **OpenCode MCP usage now counted.** OpenCode stores MCP tool calls as + `_` names, which the shared MCP pipeline did not recognize. + The provider now normalizes these to the canonical `mcp____` + form so MCP breakdowns and `optimize` work correctly. Closes #308. ## 0.9.8 - 2026-05-10 diff --git a/docs/providers/opencode.md b/docs/providers/opencode.md index 0251fcd4..0148cc91 100644 --- a/docs/providers/opencode.md +++ b/docs/providers/opencode.md @@ -4,7 +4,7 @@ OpenCode (sst/opencode). - **Source:** `src/providers/opencode.ts` - **Loading:** lazy (`src/providers/index.ts:59-75`) -- **Test:** `tests/providers/opencode.test.ts` (558 lines, the largest provider test) +- **Test:** `tests/providers/opencode.test.ts` (676 lines, the largest provider test) ## Where it reads from @@ -20,14 +20,18 @@ None. ## Deduplication -Per `:` (`opencode.ts:242`). +Per `:`. ## Quirks -- **Schema validation is loud.** When a required table is missing, the parser logs an actionable warning telling the user which table is gone and what version of OpenCode it expects (`opencode.ts:104-131`). This is the right behavior; do not silently swallow these. -- Source paths are encoded as `:` (`opencode.ts:147-150`). -- Each message's `parts` are indexed (`opencode.ts:177-191`); preserving the order matters for reasoning-token correctness. +- **Schema validation is loud.** When a required table is missing, the parser logs an actionable warning telling the user which table is gone and what version of OpenCode it expects. This is the right behavior; do not silently swallow these. +- Source paths are encoded as `:`. 
+- Each message's `parts` are indexed; preserving the order matters for reasoning-token correctness. - Tokens are reported across `input`, `output`, `reasoning`, `cache.read`, and `cache.write`. Anthropic semantics. +- External MCP tools are stored as `_` names (for example + `clickup_clickup_get_task`). The provider normalizes those to CodeBurn's + canonical `mcp____` names before aggregation so shared MCP + panels and `optimize` findings count OpenCode usage. ## When fixing a bug here diff --git a/src/providers/opencode.ts b/src/providers/opencode.ts index b39230c7..5a2546f3 100644 --- a/src/providers/opencode.ts +++ b/src/providers/opencode.ts @@ -64,6 +64,25 @@ const toolNameMap: Record = { patch: 'Patch', } +function normalizeToolName(rawTool?: string): string { + if (!rawTool) return '' + if (rawTool.startsWith('mcp__')) return rawTool + + const builtIn = toolNameMap[rawTool] + if (builtIn) return builtIn + + // OpenCode stores MCP calls as `_` with no separate server field. + // Built-ins are handled above, and server ids are assumed not to contain `_`. + const serverSeparator = rawTool.indexOf('_') + if (serverSeparator > 0 && serverSeparator < rawTool.length - 1) { + const server = rawTool.slice(0, serverSeparator) + const tool = rawTool.slice(serverSeparator + 1) + return `mcp__${server}__${tool}` + } + + return rawTool +} + function sanitize(dir: string): string { return dir.replace(/^\//, '').replace(/\//g, '-') } @@ -232,7 +251,7 @@ function createParser( const msgParts = partsByMsg.get(msg.id) ?? [] const toolParts = msgParts.filter((p) => p.type === 'tool') const tools = toolParts - .map((p) => toolNameMap[p.tool ?? ''] ?? p.tool ?? 
'') + .map((p) => normalizeToolName(p.tool)) .filter(Boolean) const bashCommands = toolParts diff --git a/tests/providers/opencode.test.ts b/tests/providers/opencode.test.ts index bd715bee..3637b79c 100644 --- a/tests/providers/opencode.test.ts +++ b/tests/providers/opencode.test.ts @@ -337,6 +337,124 @@ skipUnlessSqlite('opencode provider - session parsing', () => { expect(call.deduplicationKey).toBe('opencode:sess-1:msg-2') }) + it('normalizes opencode MCP tool names for shared MCP reporting', async () => { + const dbPath = createTestDb(tmpDir) + withTestDb(dbPath, (db) => { + insertSession(db, 'sess-1') + + insertMessage(db, 'msg-1', 'sess-1', 1700000000000, { role: 'user' }) + insertPart(db, 'part-1', 'msg-1', 'sess-1', { type: 'text', text: 'look up the ClickUp task' }) + + insertMessage(db, 'msg-2', 'sess-1', 1700000001000, { + role: 'assistant', + modelID: 'claude-opus-4-6', + cost: 0.05, + tokens: { input: 100, output: 200, reasoning: 0, cache: { read: 0, write: 0 } }, + }) + insertPart(db, 'part-2', 'msg-2', 'sess-1', { + type: 'tool', + tool: 'clickup_clickup_get_task', + state: { status: 'completed', input: {} }, + }) + insertPart(db, 'part-3', 'msg-2', 'sess-1', { + type: 'tool', + tool: 'figma_get_file', + state: { status: 'completed', input: {} }, + }) + }) + + const calls = await collectCalls(createOpenCodeProvider(tmpDir), dbPath, 'sess-1') + + expect(calls).toHaveLength(1) + expect(calls[0]!.tools).toEqual([ + 'mcp__clickup__clickup_get_task', + 'mcp__figma__get_file', + ]) + }) + + it('preserves already-normalized MCP tool names', async () => { + const dbPath = createTestDb(tmpDir) + withTestDb(dbPath, (db) => { + insertSession(db, 'sess-1') + insertMessage(db, 'msg-1', 'sess-1', 1700000001000, { + role: 'assistant', + modelID: 'claude-opus-4-6', + cost: 0.05, + tokens: { input: 100, output: 200, reasoning: 0, cache: { read: 0, write: 0 } }, + }) + insertPart(db, 'part-1', 'msg-1', 'sess-1', { + type: 'tool', + tool: 'mcp__github__search_code', + 
state: { status: 'completed', input: {} }, + }) + }) + + const calls = await collectCalls(createOpenCodeProvider(tmpDir), dbPath, 'sess-1') + + expect(calls).toHaveLength(1) + expect(calls[0]!.tools).toEqual(['mcp__github__search_code']) + }) + + it('keeps extension tool names without a server prefix as regular tools', async () => { + const dbPath = createTestDb(tmpDir) + withTestDb(dbPath, (db) => { + insertSession(db, 'sess-1') + insertMessage(db, 'msg-1', 'sess-1', 1700000001000, { + role: 'assistant', + modelID: 'claude-opus-4-6', + cost: 0.05, + tokens: { input: 100, output: 200, reasoning: 0, cache: { read: 0, write: 0 } }, + }) + insertPart(db, 'part-1', 'msg-1', 'sess-1', { + type: 'tool', + tool: 'customtool', + state: { status: 'completed', input: {} }, + }) + }) + + const calls = await collectCalls(createOpenCodeProvider(tmpDir), dbPath, 'sess-1') + + expect(calls).toHaveLength(1) + expect(calls[0]!.tools).toEqual(['customtool']) + }) + + it('keeps malformed server-prefixed tool names as regular tools', async () => { + const dbPath = createTestDb(tmpDir) + withTestDb(dbPath, (db) => { + insertSession(db, 'sess-1') + insertMessage(db, 'msg-1', 'sess-1', 1700000001000, { + role: 'assistant', + modelID: 'claude-opus-4-6', + cost: 0.05, + tokens: { input: 100, output: 200, reasoning: 0, cache: { read: 0, write: 0 } }, + }) + insertPart(db, 'part-1', 'msg-1', 'sess-1', { + type: 'tool', + tool: '_missing_server', + state: { status: 'completed', input: {} }, + }) + insertPart(db, 'part-2', 'msg-1', 'sess-1', { + type: 'tool', + tool: 'missing_', + state: { status: 'completed', input: {} }, + }) + insertPart(db, 'part-3', 'msg-1', 'sess-1', { + type: 'tool', + tool: '_', + state: { status: 'completed', input: {} }, + }) + }) + + const calls = await collectCalls(createOpenCodeProvider(tmpDir), dbPath, 'sess-1') + + expect(calls).toHaveLength(1) + expect(calls[0]!.tools).toEqual([ + '_missing_server', + 'missing_', + '_', + ]) + }) + it('skips zero-token messages 
with zero cost', async () => { const dbPath = createTestDb(tmpDir) withTestDb(dbPath, (db) => { From 38e41e93c38c040c18778d829112e1947b5a4f04 Mon Sep 17 00:00:00 2001 From: Resham Joshi <65915470+iamtoruk@users.noreply.github.com> Date: Mon, 11 May 2026 21:50:17 -0700 Subject: [PATCH 10/17] Add Node version guard for unsupported runtimes (#319) Split CLI into a tiny launcher (src/cli.ts) that checks for Node >= 22.13.0 before dynamically importing the full CLI (src/main.ts). Users on Node 18 now get a clear upgrade message instead of a cryptic regex parse error from string-width. Closes #232. --- package.json | 4 +- src/cli.ts | 991 +------------------------------------------------ src/main.ts | 978 ++++++++++++++++++++++++++++++++++++++++++++++++ tsup.config.ts | 5 +- 4 files changed, 995 insertions(+), 983 deletions(-) create mode 100644 src/main.ts diff --git a/package.json b/package.json index b831b30c..72dd6dbc 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ ], "scripts": { "bundle-litellm": "node scripts/bundle-litellm.mjs", - "build": "node scripts/bundle-litellm.mjs && tsup", + "build": "node scripts/bundle-litellm.mjs && tsup && node -e \"require('fs').copyFileSync('src/cli.ts','dist/cli.js')\"", "dev": "tsx src/cli.ts", "test": "vitest", "prepublishOnly": "npm run build" @@ -31,7 +31,7 @@ "developer-tools" ], "engines": { - "node": ">=22" + "node": ">=22.13.0" }, "author": "AgentSeal ", "license": "MIT", diff --git a/src/cli.ts b/src/cli.ts index 4ebfe337..dec3d494 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,978 +1,15 @@ -import { Command } from 'commander' -import { installMenubarApp } from './menubar-installer.js' -import { exportCsv, exportJson, type PeriodExport } from './export.js' -import { loadPricing, setModelAliases } from './models.js' -import { parseAllSessions, filterProjectsByName } from './parser.js' -import { convertCost } from './currency.js' -import { renderStatusBar } from './format.js' -import { type PeriodData, type 
ProviderCost } from './menubar-json.js' -import { buildMenubarPayload } from './menubar-json.js' -import { getDaysInRange, ensureCacheHydrated, emptyCache, BACKFILL_DAYS, toDateString } from './daily-cache.js' -import { aggregateProjectsIntoDays, buildPeriodDataFromDays, dateKey } from './day-aggregator.js' -import { CATEGORY_LABELS, type DateRange, type ProjectSummary, type TaskCategory } from './types.js' -import { aggregateModelEfficiency } from './model-efficiency.js' -import { renderDashboard } from './dashboard.js' -import { formatDateRangeLabel, parseDateRangeFlags, getDateRange, toPeriod, type Period } from './cli-date.js' -import { runOptimize, scanAndDetect } from './optimize.js' -import { renderCompare } from './compare.js' -import { getAllProviders } from './providers/index.js' -import { clearPlan, readConfig, readPlan, saveConfig, savePlan, getConfigFilePath, type PlanId } from './config.js' -import { clampResetDay, getPlanUsageOrNull, type PlanUsage } from './plan-usage.js' -import { getPresetPlan, isPlanId, isPlanProvider, planDisplayName } from './plans.js' -import { createRequire } from 'node:module' - -const require = createRequire(import.meta.url) -const { version } = require('../package.json') -import { loadCurrency, getCurrency, isValidCurrencyCode } from './currency.js' - -async function hydrateCache() { - try { - return await ensureCacheHydrated( - (range) => parseAllSessions(range, 'all'), - aggregateProjectsIntoDays, - ) - } catch { - return emptyCache() - } -} - -function collect(val: string, acc: string[]): string[] { - acc.push(val) - return acc -} - -function parseNumber(value: string): number { - return Number(value) -} - -function parseInteger(value: string): number { - return parseInt(value, 10) -} - -type JsonPlanSummary = { - id: PlanId - budget: number - spent: number - percentUsed: number - status: 'under' | 'near' | 'over' - projectedMonthEnd: number - daysUntilReset: number - periodStart: string - periodEnd: string -} - 
-function toJsonPlanSummary(planUsage: PlanUsage): JsonPlanSummary { - return { - id: planUsage.plan.id, - budget: convertCost(planUsage.budgetUsd), - spent: convertCost(planUsage.spentApiEquivalentUsd), - percentUsed: Math.round(planUsage.percentUsed * 10) / 10, - status: planUsage.status, - projectedMonthEnd: convertCost(planUsage.projectedMonthUsd), - daysUntilReset: planUsage.daysUntilReset, - periodStart: planUsage.periodStart.toISOString(), - periodEnd: planUsage.periodEnd.toISOString(), - } -} - -function assertFormat(value: string, allowed: readonly string[], command: string): void { - if (!allowed.includes(value)) { - process.stderr.write( - `codeburn ${command}: unknown format "${value}". Valid values: ${allowed.join(', ')}.\n` - ) - process.exit(1) - } -} - -async function runJsonReport(period: Period, provider: string, project: string[], exclude: string[]): Promise { - await loadPricing() - const { range, label } = getDateRange(period) - const projects = filterProjectsByName(await parseAllSessions(range, provider), project, exclude) - const report: ReturnType & { plan?: JsonPlanSummary } = buildJsonReport(projects, label, period) - const planUsage = await getPlanUsageOrNull() - if (planUsage) { - report.plan = toJsonPlanSummary(planUsage) - } - console.log(JSON.stringify(report, null, 2)) -} - -const program = new Command() - .name('codeburn') - .description('See where your AI coding tokens go - by task, tool, model, and project') - .version(version) - .option('--verbose', 'print warnings to stderr on read failures and skipped files') - .option('--timezone ', 'IANA timezone for date grouping (e.g. Asia/Tokyo, America/New_York)') - -program.hook('preAction', async (thisCommand) => { - const tz = thisCommand.opts<{ timezone?: string }>().timezone ?? process.env['CODEBURN_TZ'] - if (tz) { - try { - Intl.DateTimeFormat(undefined, { timeZone: tz }) - } catch { - console.error(`\n Invalid timezone: "${tz}". 
Use an IANA timezone like "America/New_York" or "Asia/Tokyo".\n`) - process.exit(1) - } - process.env.TZ = tz - } - const config = await readConfig() - setModelAliases(config.modelAliases ?? {}) - if (thisCommand.opts<{ verbose?: boolean }>().verbose) { - process.env['CODEBURN_VERBOSE'] = '1' - } - await loadCurrency() +#!/usr/bin/env node +// This launcher must stay parseable by Node 18. Do NOT add static imports. +const [major, minor] = process.versions.node.split('.').map(Number) +if (major < 22 || (major === 22 && minor < 13)) { + process.stderr.write( + `codeburn requires Node.js >= 22.13.0 (current: ${process.version})\n` + + 'Upgrade at https://nodejs.org/\n', + ) + process.exit(1) +} + +import('./main.js').catch((err) => { + process.stderr.write(String(err?.message ?? err) + '\n') + process.exit(1) }) - -function buildJsonReport(projects: ProjectSummary[], period: string, periodKey: string) { - const sessions = projects.flatMap(p => p.sessions) - const { code } = getCurrency() - - const totalCostUSD = projects.reduce((s, p) => s + p.totalCostUSD, 0) - const totalCalls = projects.reduce((s, p) => s + p.totalApiCalls, 0) - const totalSessions = projects.reduce((s, p) => s + p.sessions.length, 0) - const totalInput = sessions.reduce((s, sess) => s + sess.totalInputTokens, 0) - const totalOutput = sessions.reduce((s, sess) => s + sess.totalOutputTokens, 0) - const totalCacheRead = sessions.reduce((s, sess) => s + sess.totalCacheReadTokens, 0) - const totalCacheWrite = sessions.reduce((s, sess) => s + sess.totalCacheWriteTokens, 0) - // Match src/menubar-json.ts:cacheHitPercent: reads over reads+fresh-input. cache_write - // counts tokens being stored, not served, so it doesn't belong in the denominator. - const cacheHitDenom = totalInput + totalCacheRead - const cacheHitPercent = cacheHitDenom > 0 ? Math.round((totalCacheRead / cacheHitDenom) * 1000) / 10 : 0 - - // Per-day rollup. 
Mirrors parser.ts categoryBreakdown semantics so a - // consumer summing daily[].editTurns over a period gets the same total as - // sum(activities[].editTurns) for that period: every turn counts once for - // `turns`, edit turns count for `editTurns`, edit turns with zero retries - // count for `oneShotTurns`. Issue #279 — daily-resolution efficiency - // dashboards need this without re-deriving from activity-level rollups. - const dailyMap: Record = {} - for (const sess of sessions) { - for (const turn of sess.turns) { - // Prefer the user-message timestamp on the turn; fall back to the first - // assistant-call timestamp when the user line is missing (continuation - // sessions where the JSONL begins mid-conversation). Previously these - // turns dropped from daily but stayed in activities, breaking the - // sum(daily[].editTurns) === sum(activities[].editTurns) invariant. - const ts = turn.timestamp || turn.assistantCalls[0]?.timestamp - if (!ts) { continue } - const day = dateKey(ts) - if (!dailyMap[day]) { dailyMap[day] = { cost: 0, calls: 0, turns: 0, editTurns: 0, oneShotTurns: 0 } } - dailyMap[day].turns += 1 - if (turn.hasEdits) { - dailyMap[day].editTurns += 1 - if (turn.retries === 0) dailyMap[day].oneShotTurns += 1 - } - for (const call of turn.assistantCalls) { - dailyMap[day].cost += call.costUSD - dailyMap[day].calls += 1 - } - } - } - const daily = Object.entries(dailyMap).sort().map(([date, d]) => ({ - date, - cost: convertCost(d.cost), - calls: d.calls, - turns: d.turns, - editTurns: d.editTurns, - oneShotTurns: d.oneShotTurns, - // Pre-computed convenience for dashboards that don't want to do the math. - // null when there are no edit turns (the rate is undefined, not zero — - // a day where the user only had Q&A turns shouldn't read as 0% one-shot). - oneShotRate: d.editTurns > 0 - ? 
Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 - : null, - })) - - const projectList = projects.map(p => ({ - name: p.project, - path: p.projectPath, - cost: convertCost(p.totalCostUSD), - avgCostPerSession: p.sessions.length > 0 - ? convertCost(p.totalCostUSD / p.sessions.length) - : null, - calls: p.totalApiCalls, - sessions: p.sessions.length, - })) - - const modelMap: Record = {} - const modelEfficiency = aggregateModelEfficiency(projects) - for (const sess of sessions) { - for (const [model, d] of Object.entries(sess.modelBreakdown)) { - if (!modelMap[model]) { modelMap[model] = { calls: 0, cost: 0, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 } } - modelMap[model].calls += d.calls - modelMap[model].cost += d.costUSD - modelMap[model].inputTokens += d.tokens.inputTokens - modelMap[model].outputTokens += d.tokens.outputTokens - modelMap[model].cacheReadTokens += d.tokens.cacheReadInputTokens - modelMap[model].cacheWriteTokens += d.tokens.cacheCreationInputTokens - } - } - const models = Object.entries(modelMap) - .sort(([, a], [, b]) => b.cost - a.cost) - .map(([name, { cost, ...rest }]) => { - const efficiency = modelEfficiency.get(name) - return { - name, - ...rest, - cost: convertCost(cost), - editTurns: efficiency?.editTurns ?? 0, - oneShotTurns: efficiency?.oneShotTurns ?? 0, - oneShotRate: efficiency?.oneShotRate ?? null, - retriesPerEdit: efficiency?.retriesPerEdit ?? null, - costPerEdit: efficiency?.costPerEditUSD !== null && efficiency?.costPerEditUSD !== undefined - ? 
convertCost(efficiency.costPerEditUSD) - : null, - } - }) - - const catMap: Record = {} - for (const sess of sessions) { - for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { - if (!catMap[cat]) { catMap[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } } - catMap[cat].turns += d.turns - catMap[cat].cost += d.costUSD - catMap[cat].editTurns += d.editTurns - catMap[cat].oneShotTurns += d.oneShotTurns - } - } - const activities = Object.entries(catMap) - .sort(([, a], [, b]) => b.cost - a.cost) - .map(([cat, d]) => ({ - category: CATEGORY_LABELS[cat as TaskCategory] ?? cat, - cost: convertCost(d.cost), - turns: d.turns, - editTurns: d.editTurns, - oneShotTurns: d.oneShotTurns, - oneShotRate: d.editTurns > 0 ? Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 : null, - })) - - const toolMap: Record = {} - const mcpMap: Record = {} - const bashMap: Record = {} - for (const sess of sessions) { - for (const [tool, d] of Object.entries(sess.toolBreakdown)) { - toolMap[tool] = (toolMap[tool] ?? 0) + d.calls - } - for (const [server, d] of Object.entries(sess.mcpBreakdown)) { - mcpMap[server] = (mcpMap[server] ?? 0) + d.calls - } - for (const [cmd, d] of Object.entries(sess.bashBreakdown)) { - bashMap[cmd] = (bashMap[cmd] ?? 0) + d.calls - } - } - - const sortedMap = (m: Record) => - Object.entries(m).sort(([, a], [, b]) => b - a).map(([name, calls]) => ({ name, calls })) - - const topSessions = projects - .flatMap(p => p.sessions.map(s => ({ project: p.project, sessionId: s.sessionId, date: s.firstTimestamp ? 
dateKey(s.firstTimestamp) : null, cost: convertCost(s.totalCostUSD), calls: s.apiCalls }))) - .sort((a, b) => b.cost - a.cost) - .slice(0, 5) - - return { - generated: new Date().toISOString(), - currency: code, - period, - periodKey, - overview: { - cost: convertCost(totalCostUSD), - calls: totalCalls, - sessions: totalSessions, - cacheHitPercent, - tokens: { - input: totalInput, - output: totalOutput, - cacheRead: totalCacheRead, - cacheWrite: totalCacheWrite, - }, - }, - daily, - projects: projectList, - models, - activities, - tools: sortedMap(toolMap), - mcpServers: sortedMap(mcpMap), - shellCommands: sortedMap(bashMap), - topSessions, - } -} - -program - .command('report', { isDefault: true }) - .description('Interactive usage dashboard') - .option('-p, --period ', 'Starting period: today, week, 30days, month, all', 'week') - .option('--from ', 'Start date (YYYY-MM-DD). Overrides --period when set') - .option('--to ', 'End date (YYYY-MM-DD). Overrides --period when set') - .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') - .option('--format ', 'Output format: tui, json', 'tui') - .option('--project ', 'Show only projects matching name (repeatable)', collect, []) - .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) - .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) - .action(async (opts) => { - assertFormat(opts.format, ['tui', 'json'], 'report') - let customRange: DateRange | null = null - try { - customRange = parseDateRangeFlags(opts.from, opts.to) - } catch (err) { - const message = err instanceof Error ? 
err.message : String(err) - console.error(`\n Error: ${message}\n`) - process.exit(1) - } - - const period = toPeriod(opts.period) - if (opts.format === 'json') { - await loadPricing() - await hydrateCache() - if (customRange) { - const label = formatDateRangeLabel(opts.from, opts.to) - const projects = filterProjectsByName( - await parseAllSessions(customRange, opts.provider), - opts.project, - opts.exclude, - ) - console.log(JSON.stringify(buildJsonReport(projects, label, 'custom'), null, 2)) - } else { - await runJsonReport(period, opts.provider, opts.project, opts.exclude) - } - return - } - await hydrateCache() - const customRangeLabel = customRange ? formatDateRangeLabel(opts.from, opts.to) : undefined - await renderDashboard(period, opts.provider, opts.refresh, opts.project, opts.exclude, customRange, customRangeLabel) - }) - -function buildPeriodData(label: string, projects: ProjectSummary[]): PeriodData { - const sessions = projects.flatMap(p => p.sessions) - const catTotals: Record = {} - const modelTotals: Record = {} - let inputTokens = 0, outputTokens = 0, cacheReadTokens = 0, cacheWriteTokens = 0 - - for (const sess of sessions) { - inputTokens += sess.totalInputTokens - outputTokens += sess.totalOutputTokens - cacheReadTokens += sess.totalCacheReadTokens - cacheWriteTokens += sess.totalCacheWriteTokens - for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { - if (!catTotals[cat]) catTotals[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } - catTotals[cat].turns += d.turns - catTotals[cat].cost += d.costUSD - catTotals[cat].editTurns += d.editTurns - catTotals[cat].oneShotTurns += d.oneShotTurns - } - for (const [model, d] of Object.entries(sess.modelBreakdown)) { - if (!modelTotals[model]) modelTotals[model] = { calls: 0, cost: 0 } - modelTotals[model].calls += d.calls - modelTotals[model].cost += d.costUSD - } - } - - return { - label, - cost: projects.reduce((s, p) => s + p.totalCostUSD, 0), - calls: projects.reduce((s, p) => s 
+ p.totalApiCalls, 0), - sessions: projects.reduce((s, p) => s + p.sessions.length, 0), - inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens, - categories: Object.entries(catTotals) - .sort(([, a], [, b]) => b.cost - a.cost) - .map(([cat, d]) => ({ name: CATEGORY_LABELS[cat as TaskCategory] ?? cat, ...d })), - models: Object.entries(modelTotals) - .sort(([, a], [, b]) => b.cost - a.cost) - .map(([name, d]) => ({ name, ...d })), - } -} - -program - .command('status') - .description('Compact status output (today + month)') - .option('--format ', 'Output format: terminal, menubar-json, json', 'terminal') - .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') - .option('--project ', 'Show only projects matching name (repeatable)', collect, []) - .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) - .option('--period ', 'Primary period for menubar-json: today, week, 30days, month, all', 'today') - .option('--no-optimize', 'Skip optimize findings (menubar-json only, faster)') - .action(async (opts) => { - assertFormat(opts.format, ['terminal', 'menubar-json', 'json'], 'status') - await loadPricing() - const pf = opts.provider - const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) - if (opts.format === 'menubar-json') { - const periodInfo = getDateRange(opts.period) - const now = new Date() - const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate()) - const yesterdayStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - 1)) - const isAllProviders = pf === 'all' - - const cache = await hydrateCache() - - // CURRENT PERIOD DATA - // - .all provider: assemble from cache + today (fast) - // - specific provider: parse the period range with provider filter (correct, but slower) - let currentData: PeriodData - let scanProjects: ProjectSummary[] - let scanRange: DateRange - - if (isAllProviders) { - // Parse only today's 
sessions; historical data comes from cache to avoid double-counting - const todayRange: DateRange = { start: todayStart, end: new Date() } - const todayProjects = fp(await parseAllSessions(todayRange, 'all')) - const todayDays = aggregateProjectsIntoDays(todayProjects) - const rangeStartStr = toDateString(periodInfo.range.start) - const rangeEndStr = toDateString(periodInfo.range.end) - const historicalDays = getDaysInRange(cache, rangeStartStr, yesterdayStr) - const todayInRange = todayDays.filter(d => d.date >= rangeStartStr && d.date <= rangeEndStr) - const allDays = [...historicalDays, ...todayInRange].sort((a, b) => a.date.localeCompare(b.date)) - currentData = buildPeriodDataFromDays(allDays, periodInfo.label) - scanProjects = todayProjects - scanRange = periodInfo.range - } else { - const projects = fp(await parseAllSessions(periodInfo.range, pf)) - currentData = buildPeriodData(periodInfo.label, projects) - scanProjects = projects - scanRange = periodInfo.range - } - - // PROVIDERS - // For .all: enumerate every provider with cost across the period (from cache) + installed-but-zero. - // For specific: just this single provider with its scoped cost. 
- const allProviders = await getAllProviders() - const displayNameByName = new Map(allProviders.map(p => [p.name, p.displayName])) - const providers: ProviderCost[] = [] - if (isAllProviders) { - // Parse only today; historical provider costs come from cache - const todayRangeForProviders: DateRange = { start: todayStart, end: new Date() } - const todayDaysForProviders = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForProviders, 'all'))) - const rangeStartStr = toDateString(periodInfo.range.start) - const todayStr = toDateString(todayStart) - const allDaysForProviders = [ - ...getDaysInRange(cache, rangeStartStr, yesterdayStr), - ...todayDaysForProviders.filter(d => d.date === todayStr), - ] - const providerTotals: Record = {} - for (const d of allDaysForProviders) { - for (const [name, p] of Object.entries(d.providers)) { - providerTotals[name] = (providerTotals[name] ?? 0) + p.cost - } - } - for (const [name, cost] of Object.entries(providerTotals)) { - providers.push({ name: displayNameByName.get(name) ?? name, cost }) - } - for (const p of allProviders) { - if (providers.some(pc => pc.name === p.displayName)) continue - const sources = await p.discoverSessions() - if (sources.length > 0) providers.push({ name: p.displayName, cost: 0 }) - } - } else { - const display = displayNameByName.get(pf) ?? pf - providers.push({ name: display, cost: currentData.cost }) - } - - // DAILY HISTORY (last 365 days) - // Cache stores per-provider cost+calls per day in DailyEntry.providers, so we can derive - // a provider-filtered history without re-parsing. Tokens aren't broken down per provider - // in the cache, so the filtered view shows zero tokens (heatmap/trend still works on cost). 
- const historyStartStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - BACKFILL_DAYS)) - const allCacheDays = getDaysInRange(cache, historyStartStr, yesterdayStr) - // Parse only today for history; historical days come from cache - const todayRangeForHistory: DateRange = { start: todayStart, end: new Date() } - const allTodayDaysForHistory = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForHistory, 'all'))) - const todayStrForHistory = toDateString(todayStart) - const fullHistory = [...allCacheDays, ...allTodayDaysForHistory.filter(d => d.date === todayStrForHistory)] - const dailyHistory = fullHistory.map(d => { - if (isAllProviders) { - const topModels = Object.entries(d.models) - .filter(([name]) => name !== '') - .sort(([, a], [, b]) => b.cost - a.cost) - .slice(0, 5) - .map(([name, m]) => ({ - name, - cost: m.cost, - calls: m.calls, - inputTokens: m.inputTokens, - outputTokens: m.outputTokens, - })) - return { - date: d.date, - cost: d.cost, - calls: d.calls, - inputTokens: d.inputTokens, - outputTokens: d.outputTokens, - cacheReadTokens: d.cacheReadTokens, - cacheWriteTokens: d.cacheWriteTokens, - topModels, - } - } - const prov = d.providers[pf] ?? { calls: 0, cost: 0 } - return { - date: d.date, - cost: prov.cost, - calls: prov.calls, - inputTokens: 0, - outputTokens: 0, - cacheReadTokens: 0, - cacheWriteTokens: 0, - topModels: [], - } - }) - - const optimize = opts.optimize === false ? 
null : await scanAndDetect(scanProjects, scanRange) - console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory))) - return - } - - if (opts.format === 'json') { - await hydrateCache() - const todayData = buildPeriodData('today', fp(await parseAllSessions(getDateRange('today').range, pf))) - const monthData = buildPeriodData('month', fp(await parseAllSessions(getDateRange('month').range, pf))) - const { code, rate } = getCurrency() - const payload: { - currency: string - today: { cost: number; calls: number } - month: { cost: number; calls: number } - plan?: JsonPlanSummary - } = { - currency: code, - today: { cost: Math.round(todayData.cost * rate * 100) / 100, calls: todayData.calls }, - month: { cost: Math.round(monthData.cost * rate * 100) / 100, calls: monthData.calls }, - } - const planUsage = await getPlanUsageOrNull() - if (planUsage) { - payload.plan = toJsonPlanSummary(planUsage) - } - console.log(JSON.stringify(payload)) - return - } - - await hydrateCache() - const monthProjects = fp(await parseAllSessions(getDateRange('month').range, pf)) - console.log(renderStatusBar(monthProjects)) - }) - -program - .command('today') - .description('Today\'s usage dashboard') - .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') - .option('--format ', 'Output format: tui, json', 'tui') - .option('--project ', 'Show only projects matching name (repeatable)', collect, []) - .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) - .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) - .action(async (opts) => { - assertFormat(opts.format, ['tui', 'json'], 'today') - if (opts.format === 'json') { - await runJsonReport('today', opts.provider, opts.project, opts.exclude) - return - } - await hydrateCache() - await renderDashboard('today', opts.provider, opts.refresh, opts.project, opts.exclude) - }) - -program - .command('month') - .description('This month\'s usage dashboard') - .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') - .option('--format ', 'Output format: tui, json', 'tui') - .option('--project ', 'Show only projects matching name (repeatable)', collect, []) - .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) - .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) - .action(async (opts) => { - assertFormat(opts.format, ['tui', 'json'], 'month') - if (opts.format === 'json') { - await runJsonReport('month', opts.provider, opts.project, opts.exclude) - return - } - await hydrateCache() - await renderDashboard('month', opts.provider, opts.refresh, opts.project, opts.exclude) - }) - -program - .command('export') - .description('Export usage data to CSV or JSON') - .option('-f, --format ', 'Export format: csv, json', 'csv') - .option('-o, --output ', 'Output file path') - .option('--from ', 'Start date (YYYY-MM-DD). Exports a single custom period when set') - .option('--to ', 'End date (YYYY-MM-DD). Exports a single custom period when set') - .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') - .option('--project ', 'Show only projects matching name (repeatable)', collect, []) - .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) - .action(async (opts) => { - assertFormat(opts.format, ['csv', 'json'], 'export') - await loadPricing() - await hydrateCache() - const pf = opts.provider - const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) - let customRange: DateRange | null = null - try { - customRange = parseDateRangeFlags(opts.from, opts.to) - } catch (err) { - const message = err instanceof Error ? err.message : String(err) - console.error(`\n Error: ${message}\n`) - process.exit(1) - } - - const periods: PeriodExport[] = customRange - ? [{ label: formatDateRangeLabel(opts.from, opts.to), projects: fp(await parseAllSessions(customRange, pf)) }] - : [ - { label: 'Today', projects: fp(await parseAllSessions(getDateRange('today').range, pf)) }, - { label: '7 Days', projects: fp(await parseAllSessions(getDateRange('week').range, pf)) }, - { label: '30 Days', projects: fp(await parseAllSessions(getDateRange('30days').range, pf)) }, - ] - - if (periods.every(p => p.projects.length === 0)) { - console.log('\n No usage data found.\n') - return - } - - const defaultName = `codeburn-${toDateString(new Date())}` - const outputPath = opts.output ?? `${defaultName}.${opts.format}` - - let savedPath: string - try { - if (opts.format === 'json') { - savedPath = await exportJson(periods, outputPath) - } else { - savedPath = await exportCsv(periods, outputPath) - } - } catch (err) { - // Protection guards in export.ts (symlink refusal, non-codeburn folder refusal, etc.) - // throw with a user-readable message. Print just the message, not the stack, so the CLI - // doesn't spray its internals at the user. - const message = err instanceof Error ? 
err.message : String(err) - console.error(`\n Export failed: ${message}\n`) - process.exit(1) - } - - const exportedLabel = customRange ? formatDateRangeLabel(opts.from, opts.to) : 'Today + 7 Days + 30 Days' - console.log(`\n Exported (${exportedLabel}) to: ${savedPath}\n`) - }) - -program - .command('menubar') - .description('Install and launch the macOS menubar app (one command, no clone)') - .option('--force', 'Reinstall even if an older copy is already in ~/Applications') - .action(async (opts: { force?: boolean }) => { - try { - const result = await installMenubarApp({ force: opts.force }) - console.log(`\n Ready. ${result.installedPath}\n`) - } catch (err) { - const message = err instanceof Error ? err.message : String(err) - console.error(`\n Menubar install failed: ${message}\n`) - process.exit(1) - } - }) - -program - .command('currency [code]') - .description('Set display currency (e.g. codeburn currency GBP)') - .option('--symbol ', 'Override the currency symbol') - .option('--reset', 'Reset to USD (removes currency config)') - .action(async (code?: string, opts?: { symbol?: string; reset?: boolean }) => { - if (opts?.reset) { - const config = await readConfig() - delete config.currency - await saveConfig(config) - console.log('\n Currency reset to USD.\n') - return - } - - if (!code) { - const { code: activeCode, rate, symbol } = getCurrency() - if (activeCode === 'USD' && rate === 1) { - console.log('\n Currency: USD (default)') - console.log(` Config: ${getConfigFilePath()}\n`) - } else { - console.log(`\n Currency: ${activeCode}`) - console.log(` Symbol: ${symbol}`) - console.log(` Rate: 1 USD = ${rate} ${activeCode}`) - console.log(` Config: ${getConfigFilePath()}\n`) - } - return - } - - const upperCode = code.toUpperCase() - if (!isValidCurrencyCode(upperCode)) { - console.error(`\n "${code}" is not a valid ISO 4217 currency code.\n`) - process.exitCode = 1 - return - } - - const config = await readConfig() - config.currency = { - code: upperCode, 
- ...(opts?.symbol ? { symbol: opts.symbol } : {}), - } - await saveConfig(config) - - await loadCurrency() - const { rate, symbol } = getCurrency() - - console.log(`\n Currency set to ${upperCode}.`) - console.log(` Symbol: ${symbol}`) - console.log(` Rate: 1 USD = ${rate} ${upperCode}`) - console.log(` Config saved to ${getConfigFilePath()}\n`) - }) - -program - .command('model-alias [from] [to]') - .description('Map a provider model name to a canonical one for pricing (e.g. codeburn model-alias my-model claude-opus-4-6)') - .option('--remove ', 'Remove an alias') - .option('--list', 'List configured aliases') - .action(async (from?: string, to?: string, opts?: { remove?: string; list?: boolean }) => { - const config = await readConfig() - const aliases = config.modelAliases ?? {} - - if (opts?.list || (!from && !opts?.remove)) { - const entries = Object.entries(aliases) - if (entries.length === 0) { - console.log('\n No model aliases configured.') - console.log(` Config: ${getConfigFilePath()}\n`) - } else { - console.log('\n Model aliases:') - for (const [src, dst] of entries) { - console.log(` ${src} -> ${dst}`) - } - console.log(` Config: ${getConfigFilePath()}\n`) - } - return - } - - if (opts?.remove) { - if (!(opts.remove in aliases)) { - console.error(`\n Alias not found: ${opts.remove}\n`) - process.exitCode = 1 - return - } - delete aliases[opts.remove] - config.modelAliases = Object.keys(aliases).length > 0 ? 
aliases : undefined - await saveConfig(config) - console.log(`\n Removed alias: ${opts.remove}\n`) - return - } - - if (!from || !to) { - console.error('\n Usage: codeburn model-alias \n') - process.exitCode = 1 - return - } - - aliases[from] = to - config.modelAliases = aliases - await saveConfig(config) - console.log(`\n Alias saved: ${from} -> ${to}`) - console.log(` Config: ${getConfigFilePath()}\n`) - }) - -program - .command('plan [action] [id]') - .description('Show or configure a subscription plan for overage tracking') - .option('--format ', 'Output format: text or json', 'text') - .option('--monthly-usd ', 'Monthly plan price in USD (for custom)', parseNumber) - .option('--provider ', 'Provider scope: all, claude, codex, cursor', 'all') - .option('--reset-day ', 'Day of month plan resets (1-28)', parseInteger, 1) - .action(async (action?: string, id?: string, opts?: { format?: string; monthlyUsd?: number; provider?: string; resetDay?: number }) => { - assertFormat(opts?.format ?? 'text', ['text', 'json'], 'plan') - const mode = action ?? 'show' - - if (mode === 'show') { - const plan = await readPlan() - const displayPlan = !plan || plan.id === 'none' - ? 
{ id: 'none', monthlyUsd: 0, provider: 'all', resetDay: 1, setAt: null } - : { - id: plan.id, - monthlyUsd: plan.monthlyUsd, - provider: plan.provider, - resetDay: clampResetDay(plan.resetDay), - setAt: plan.setAt, - } - if (opts?.format === 'json') { - console.log(JSON.stringify(displayPlan)) - return - } - if (!plan || plan.id === 'none') { - console.log('\n Plan: none') - console.log(' API-pricing view is active.') - console.log(` Config: ${getConfigFilePath()}\n`) - return - } - console.log(`\n Plan: ${planDisplayName(plan.id)} (${plan.id})`) - console.log(` Budget: $${plan.monthlyUsd}/month`) - console.log(` Provider: ${plan.provider}`) - console.log(` Reset day: ${clampResetDay(plan.resetDay)}`) - console.log(` Set at: ${plan.setAt}`) - console.log(` Config: ${getConfigFilePath()}\n`) - return - } - - if (mode === 'reset') { - await clearPlan() - console.log('\n Plan reset. API-pricing view is active.\n') - return - } - - if (mode !== 'set') { - console.error('\n Usage: codeburn plan [set | reset]\n') - process.exitCode = 1 - return - } - - if (!id || !isPlanId(id)) { - console.error(`\n Plan id must be one of: claude-pro, claude-max, cursor-pro, custom, none; got "${id ?? ''}".\n`) - process.exitCode = 1 - return - } - - const resetDay = opts?.resetDay ?? 1 - if (!Number.isInteger(resetDay) || resetDay < 1 || resetDay > 28) { - console.error(`\n --reset-day must be an integer from 1 to 28; got ${resetDay}.\n`) - process.exitCode = 1 - return - } - - if (id === 'none') { - await clearPlan() - console.log('\n Plan reset. 
API-pricing view is active.\n') - return - } - - if (id === 'custom') { - if (opts?.monthlyUsd === undefined) { - console.error('\n Custom plans require --monthly-usd .\n') - process.exitCode = 1 - return - } - const monthlyUsd = opts.monthlyUsd - if (!Number.isFinite(monthlyUsd) || monthlyUsd <= 0) { - console.error(`\n --monthly-usd must be a positive number; got ${opts.monthlyUsd}.\n`) - process.exitCode = 1 - return - } - const provider = opts?.provider ?? 'all' - if (!isPlanProvider(provider)) { - console.error(`\n --provider must be one of: all, claude, codex, cursor; got "${provider}".\n`) - process.exitCode = 1 - return - } - await savePlan({ - id: 'custom', - monthlyUsd, - provider, - resetDay, - setAt: new Date().toISOString(), - }) - console.log(`\n Plan set to custom ($${monthlyUsd}/month, ${provider}, reset day ${resetDay}).`) - console.log(` Config saved to ${getConfigFilePath()}\n`) - return - } - - const preset = getPresetPlan(id) - if (!preset) { - console.error(`\n Unknown preset "${id}".\n`) - process.exitCode = 1 - return - } - - await savePlan({ - ...preset, - resetDay, - setAt: new Date().toISOString(), - }) - console.log(`\n Plan set to ${planDisplayName(preset.id)} ($${preset.monthlyUsd}/month).`) - console.log(` Provider: ${preset.provider}`) - console.log(` Reset day: ${resetDay}`) - console.log(` Config saved to ${getConfigFilePath()}\n`) - }) - -program - .command('optimize') - .description('Find token waste and get exact fixes') - .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') - .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') - .action(async (opts) => { - await loadPricing() - await hydrateCache() - const { range, label } = getDateRange(opts.period) - const projects = await parseAllSessions(range, opts.provider) - await runOptimize(projects, label, range) - }) - -program - .command('compare') - .description('Compare two AI models side-by-side') - .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'all') - .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') - .action(async (opts) => { - await loadPricing() - await hydrateCache() - const { range } = getDateRange(opts.period) - await renderCompare(range, opts.provider) - }) - -program - .command('models') - .description('Per-model token + cost table, optionally exploded by task type') - .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') - .option('--from ', 'Custom range start (YYYY-MM-DD)') - .option('--to ', 'Custom range end (YYYY-MM-DD)') - .option('--provider ', 'Filter by provider (e.g. claude, codex, cursor)', 'all') - .option('--task ', 'Filter to one task type (e.g. 
feature, debugging, refactoring)') - .option('--by-task', 'One row per (provider, model, task) instead of one row per (provider, model)') - .option('--top ', 'Show only the top N rows', (v: string) => parseInt(v, 10)) - .option('--min-cost ', 'Hide rows below this cost threshold', (v: string) => parseFloat(v)) - .option('--no-totals', 'Suppress the footer totals row') - .option('--format ', 'Output format: table, markdown, json, csv', 'table') - .action(async (opts) => { - const { aggregateModels, renderTable, renderMarkdown, renderJson, renderCsv } = await import('./models-report.js') - await loadPricing() - await hydrateCache() - - let range - if (opts.from || opts.to) { - const customRange = parseDateRangeFlags(opts.from, opts.to) - if (!customRange) { - process.stderr.write('codeburn: --from and --to must be valid YYYY-MM-DD dates\n') - process.exit(1) - } - range = customRange - } else { - range = getDateRange(opts.period).range - } - - const projects = await parseAllSessions(range, opts.provider) - const rows = await aggregateModels(projects, { - byTask: !!opts.byTask, - taskFilter: opts.task, - topN: typeof opts.top === 'number' && Number.isFinite(opts.top) ? opts.top : undefined, - minCost: typeof opts.minCost === 'number' && Number.isFinite(opts.minCost) ? opts.minCost : 0.01, - }) - - const fmt = (opts.format ?? 
'table').toLowerCase() - if (rows.length === 0 && (fmt === 'table' || fmt === 'markdown')) { - process.stdout.write('No model usage found for the selected period.\n') - return - } - if (fmt === 'json') { - process.stdout.write(renderJson(rows) + '\n') - } else if (fmt === 'csv') { - process.stdout.write(renderCsv(rows, { byTask: !!opts.byTask }) + '\n') - } else if (fmt === 'markdown' || fmt === 'md') { - process.stdout.write(renderMarkdown(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') - } else if (fmt === 'table') { - process.stdout.write(renderTable(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') - } else { - process.stderr.write(`codeburn: unknown --format "${opts.format}". Choose table, markdown, json, or csv.\n`) - process.exit(1) - } - }) - -program - .command('yield') - .description('Track which AI spend shipped to main vs reverted/abandoned (experimental)') - .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'week') - .action(async (opts) => { - const { computeYield, formatYieldSummary } = await import('./yield.js') - await loadPricing() - await hydrateCache() - const { range, label } = getDateRange(opts.period) - console.log(`\n Analyzing yield for ${label}...\n`) - const summary = await computeYield(range, process.cwd()) - console.log(formatYieldSummary(summary)) - }) - -program.parse() diff --git a/src/main.ts b/src/main.ts new file mode 100644 index 00000000..4ebfe337 --- /dev/null +++ b/src/main.ts @@ -0,0 +1,978 @@ +import { Command } from 'commander' +import { installMenubarApp } from './menubar-installer.js' +import { exportCsv, exportJson, type PeriodExport } from './export.js' +import { loadPricing, setModelAliases } from './models.js' +import { parseAllSessions, filterProjectsByName } from './parser.js' +import { convertCost } from './currency.js' +import { renderStatusBar } from './format.js' +import { type PeriodData, type ProviderCost } from 
'./menubar-json.js' +import { buildMenubarPayload } from './menubar-json.js' +import { getDaysInRange, ensureCacheHydrated, emptyCache, BACKFILL_DAYS, toDateString } from './daily-cache.js' +import { aggregateProjectsIntoDays, buildPeriodDataFromDays, dateKey } from './day-aggregator.js' +import { CATEGORY_LABELS, type DateRange, type ProjectSummary, type TaskCategory } from './types.js' +import { aggregateModelEfficiency } from './model-efficiency.js' +import { renderDashboard } from './dashboard.js' +import { formatDateRangeLabel, parseDateRangeFlags, getDateRange, toPeriod, type Period } from './cli-date.js' +import { runOptimize, scanAndDetect } from './optimize.js' +import { renderCompare } from './compare.js' +import { getAllProviders } from './providers/index.js' +import { clearPlan, readConfig, readPlan, saveConfig, savePlan, getConfigFilePath, type PlanId } from './config.js' +import { clampResetDay, getPlanUsageOrNull, type PlanUsage } from './plan-usage.js' +import { getPresetPlan, isPlanId, isPlanProvider, planDisplayName } from './plans.js' +import { createRequire } from 'node:module' + +const require = createRequire(import.meta.url) +const { version } = require('../package.json') +import { loadCurrency, getCurrency, isValidCurrencyCode } from './currency.js' + +async function hydrateCache() { + try { + return await ensureCacheHydrated( + (range) => parseAllSessions(range, 'all'), + aggregateProjectsIntoDays, + ) + } catch { + return emptyCache() + } +} + +function collect(val: string, acc: string[]): string[] { + acc.push(val) + return acc +} + +function parseNumber(value: string): number { + return Number(value) +} + +function parseInteger(value: string): number { + return parseInt(value, 10) +} + +type JsonPlanSummary = { + id: PlanId + budget: number + spent: number + percentUsed: number + status: 'under' | 'near' | 'over' + projectedMonthEnd: number + daysUntilReset: number + periodStart: string + periodEnd: string +} + +function 
toJsonPlanSummary(planUsage: PlanUsage): JsonPlanSummary { + return { + id: planUsage.plan.id, + budget: convertCost(planUsage.budgetUsd), + spent: convertCost(planUsage.spentApiEquivalentUsd), + percentUsed: Math.round(planUsage.percentUsed * 10) / 10, + status: planUsage.status, + projectedMonthEnd: convertCost(planUsage.projectedMonthUsd), + daysUntilReset: planUsage.daysUntilReset, + periodStart: planUsage.periodStart.toISOString(), + periodEnd: planUsage.periodEnd.toISOString(), + } +} + +function assertFormat(value: string, allowed: readonly string[], command: string): void { + if (!allowed.includes(value)) { + process.stderr.write( + `codeburn ${command}: unknown format "${value}". Valid values: ${allowed.join(', ')}.\n` + ) + process.exit(1) + } +} + +async function runJsonReport(period: Period, provider: string, project: string[], exclude: string[]): Promise { + await loadPricing() + const { range, label } = getDateRange(period) + const projects = filterProjectsByName(await parseAllSessions(range, provider), project, exclude) + const report: ReturnType & { plan?: JsonPlanSummary } = buildJsonReport(projects, label, period) + const planUsage = await getPlanUsageOrNull() + if (planUsage) { + report.plan = toJsonPlanSummary(planUsage) + } + console.log(JSON.stringify(report, null, 2)) +} + +const program = new Command() + .name('codeburn') + .description('See where your AI coding tokens go - by task, tool, model, and project') + .version(version) + .option('--verbose', 'print warnings to stderr on read failures and skipped files') + .option('--timezone ', 'IANA timezone for date grouping (e.g. Asia/Tokyo, America/New_York)') + +program.hook('preAction', async (thisCommand) => { + const tz = thisCommand.opts<{ timezone?: string }>().timezone ?? process.env['CODEBURN_TZ'] + if (tz) { + try { + Intl.DateTimeFormat(undefined, { timeZone: tz }) + } catch { + console.error(`\n Invalid timezone: "${tz}". 
Use an IANA timezone like "America/New_York" or "Asia/Tokyo".\n`) + process.exit(1) + } + process.env.TZ = tz + } + const config = await readConfig() + setModelAliases(config.modelAliases ?? {}) + if (thisCommand.opts<{ verbose?: boolean }>().verbose) { + process.env['CODEBURN_VERBOSE'] = '1' + } + await loadCurrency() +}) + +function buildJsonReport(projects: ProjectSummary[], period: string, periodKey: string) { + const sessions = projects.flatMap(p => p.sessions) + const { code } = getCurrency() + + const totalCostUSD = projects.reduce((s, p) => s + p.totalCostUSD, 0) + const totalCalls = projects.reduce((s, p) => s + p.totalApiCalls, 0) + const totalSessions = projects.reduce((s, p) => s + p.sessions.length, 0) + const totalInput = sessions.reduce((s, sess) => s + sess.totalInputTokens, 0) + const totalOutput = sessions.reduce((s, sess) => s + sess.totalOutputTokens, 0) + const totalCacheRead = sessions.reduce((s, sess) => s + sess.totalCacheReadTokens, 0) + const totalCacheWrite = sessions.reduce((s, sess) => s + sess.totalCacheWriteTokens, 0) + // Match src/menubar-json.ts:cacheHitPercent: reads over reads+fresh-input. cache_write + // counts tokens being stored, not served, so it doesn't belong in the denominator. + const cacheHitDenom = totalInput + totalCacheRead + const cacheHitPercent = cacheHitDenom > 0 ? Math.round((totalCacheRead / cacheHitDenom) * 1000) / 10 : 0 + + // Per-day rollup. Mirrors parser.ts categoryBreakdown semantics so a + // consumer summing daily[].editTurns over a period gets the same total as + // sum(activities[].editTurns) for that period: every turn counts once for + // `turns`, edit turns count for `editTurns`, edit turns with zero retries + // count for `oneShotTurns`. Issue #279 — daily-resolution efficiency + // dashboards need this without re-deriving from activity-level rollups. 
+ const dailyMap: Record = {} + for (const sess of sessions) { + for (const turn of sess.turns) { + // Prefer the user-message timestamp on the turn; fall back to the first + // assistant-call timestamp when the user line is missing (continuation + // sessions where the JSONL begins mid-conversation). Previously these + // turns dropped from daily but stayed in activities, breaking the + // sum(daily[].editTurns) === sum(activities[].editTurns) invariant. + const ts = turn.timestamp || turn.assistantCalls[0]?.timestamp + if (!ts) { continue } + const day = dateKey(ts) + if (!dailyMap[day]) { dailyMap[day] = { cost: 0, calls: 0, turns: 0, editTurns: 0, oneShotTurns: 0 } } + dailyMap[day].turns += 1 + if (turn.hasEdits) { + dailyMap[day].editTurns += 1 + if (turn.retries === 0) dailyMap[day].oneShotTurns += 1 + } + for (const call of turn.assistantCalls) { + dailyMap[day].cost += call.costUSD + dailyMap[day].calls += 1 + } + } + } + const daily = Object.entries(dailyMap).sort().map(([date, d]) => ({ + date, + cost: convertCost(d.cost), + calls: d.calls, + turns: d.turns, + editTurns: d.editTurns, + oneShotTurns: d.oneShotTurns, + // Pre-computed convenience for dashboards that don't want to do the math. + // null when there are no edit turns (the rate is undefined, not zero — + // a day where the user only had Q&A turns shouldn't read as 0% one-shot). + oneShotRate: d.editTurns > 0 + ? Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 + : null, + })) + + const projectList = projects.map(p => ({ + name: p.project, + path: p.projectPath, + cost: convertCost(p.totalCostUSD), + avgCostPerSession: p.sessions.length > 0 + ? 
convertCost(p.totalCostUSD / p.sessions.length) + : null, + calls: p.totalApiCalls, + sessions: p.sessions.length, + })) + + const modelMap: Record = {} + const modelEfficiency = aggregateModelEfficiency(projects) + for (const sess of sessions) { + for (const [model, d] of Object.entries(sess.modelBreakdown)) { + if (!modelMap[model]) { modelMap[model] = { calls: 0, cost: 0, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 } } + modelMap[model].calls += d.calls + modelMap[model].cost += d.costUSD + modelMap[model].inputTokens += d.tokens.inputTokens + modelMap[model].outputTokens += d.tokens.outputTokens + modelMap[model].cacheReadTokens += d.tokens.cacheReadInputTokens + modelMap[model].cacheWriteTokens += d.tokens.cacheCreationInputTokens + } + } + const models = Object.entries(modelMap) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([name, { cost, ...rest }]) => { + const efficiency = modelEfficiency.get(name) + return { + name, + ...rest, + cost: convertCost(cost), + editTurns: efficiency?.editTurns ?? 0, + oneShotTurns: efficiency?.oneShotTurns ?? 0, + oneShotRate: efficiency?.oneShotRate ?? null, + retriesPerEdit: efficiency?.retriesPerEdit ?? null, + costPerEdit: efficiency?.costPerEditUSD !== null && efficiency?.costPerEditUSD !== undefined + ? convertCost(efficiency.costPerEditUSD) + : null, + } + }) + + const catMap: Record = {} + for (const sess of sessions) { + for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { + if (!catMap[cat]) { catMap[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } } + catMap[cat].turns += d.turns + catMap[cat].cost += d.costUSD + catMap[cat].editTurns += d.editTurns + catMap[cat].oneShotTurns += d.oneShotTurns + } + } + const activities = Object.entries(catMap) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([cat, d]) => ({ + category: CATEGORY_LABELS[cat as TaskCategory] ?? 
cat, + cost: convertCost(d.cost), + turns: d.turns, + editTurns: d.editTurns, + oneShotTurns: d.oneShotTurns, + oneShotRate: d.editTurns > 0 ? Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 : null, + })) + + const toolMap: Record = {} + const mcpMap: Record = {} + const bashMap: Record = {} + for (const sess of sessions) { + for (const [tool, d] of Object.entries(sess.toolBreakdown)) { + toolMap[tool] = (toolMap[tool] ?? 0) + d.calls + } + for (const [server, d] of Object.entries(sess.mcpBreakdown)) { + mcpMap[server] = (mcpMap[server] ?? 0) + d.calls + } + for (const [cmd, d] of Object.entries(sess.bashBreakdown)) { + bashMap[cmd] = (bashMap[cmd] ?? 0) + d.calls + } + } + + const sortedMap = (m: Record) => + Object.entries(m).sort(([, a], [, b]) => b - a).map(([name, calls]) => ({ name, calls })) + + const topSessions = projects + .flatMap(p => p.sessions.map(s => ({ project: p.project, sessionId: s.sessionId, date: s.firstTimestamp ? dateKey(s.firstTimestamp) : null, cost: convertCost(s.totalCostUSD), calls: s.apiCalls }))) + .sort((a, b) => b.cost - a.cost) + .slice(0, 5) + + return { + generated: new Date().toISOString(), + currency: code, + period, + periodKey, + overview: { + cost: convertCost(totalCostUSD), + calls: totalCalls, + sessions: totalSessions, + cacheHitPercent, + tokens: { + input: totalInput, + output: totalOutput, + cacheRead: totalCacheRead, + cacheWrite: totalCacheWrite, + }, + }, + daily, + projects: projectList, + models, + activities, + tools: sortedMap(toolMap), + mcpServers: sortedMap(mcpMap), + shellCommands: sortedMap(bashMap), + topSessions, + } +} + +program + .command('report', { isDefault: true }) + .description('Interactive usage dashboard') + .option('-p, --period ', 'Starting period: today, week, 30days, month, all', 'week') + .option('--from ', 'Start date (YYYY-MM-DD). Overrides --period when set') + .option('--to ', 'End date (YYYY-MM-DD). 
Overrides --period when set') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'report') + let customRange: DateRange | null = null + try { + customRange = parseDateRangeFlags(opts.from, opts.to) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Error: ${message}\n`) + process.exit(1) + } + + const period = toPeriod(opts.period) + if (opts.format === 'json') { + await loadPricing() + await hydrateCache() + if (customRange) { + const label = formatDateRangeLabel(opts.from, opts.to) + const projects = filterProjectsByName( + await parseAllSessions(customRange, opts.provider), + opts.project, + opts.exclude, + ) + console.log(JSON.stringify(buildJsonReport(projects, label, 'custom'), null, 2)) + } else { + await runJsonReport(period, opts.provider, opts.project, opts.exclude) + } + return + } + await hydrateCache() + const customRangeLabel = customRange ? 
formatDateRangeLabel(opts.from, opts.to) : undefined + await renderDashboard(period, opts.provider, opts.refresh, opts.project, opts.exclude, customRange, customRangeLabel) + }) + +function buildPeriodData(label: string, projects: ProjectSummary[]): PeriodData { + const sessions = projects.flatMap(p => p.sessions) + const catTotals: Record = {} + const modelTotals: Record = {} + let inputTokens = 0, outputTokens = 0, cacheReadTokens = 0, cacheWriteTokens = 0 + + for (const sess of sessions) { + inputTokens += sess.totalInputTokens + outputTokens += sess.totalOutputTokens + cacheReadTokens += sess.totalCacheReadTokens + cacheWriteTokens += sess.totalCacheWriteTokens + for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { + if (!catTotals[cat]) catTotals[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } + catTotals[cat].turns += d.turns + catTotals[cat].cost += d.costUSD + catTotals[cat].editTurns += d.editTurns + catTotals[cat].oneShotTurns += d.oneShotTurns + } + for (const [model, d] of Object.entries(sess.modelBreakdown)) { + if (!modelTotals[model]) modelTotals[model] = { calls: 0, cost: 0 } + modelTotals[model].calls += d.calls + modelTotals[model].cost += d.costUSD + } + } + + return { + label, + cost: projects.reduce((s, p) => s + p.totalCostUSD, 0), + calls: projects.reduce((s, p) => s + p.totalApiCalls, 0), + sessions: projects.reduce((s, p) => s + p.sessions.length, 0), + inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens, + categories: Object.entries(catTotals) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([cat, d]) => ({ name: CATEGORY_LABELS[cat as TaskCategory] ?? 
cat, ...d })), + models: Object.entries(modelTotals) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([name, d]) => ({ name, ...d })), + } +} + +program + .command('status') + .description('Compact status output (today + month)') + .option('--format ', 'Output format: terminal, menubar-json, json', 'terminal') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--period ', 'Primary period for menubar-json: today, week, 30days, month, all', 'today') + .option('--no-optimize', 'Skip optimize findings (menubar-json only, faster)') + .action(async (opts) => { + assertFormat(opts.format, ['terminal', 'menubar-json', 'json'], 'status') + await loadPricing() + const pf = opts.provider + const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) + if (opts.format === 'menubar-json') { + const periodInfo = getDateRange(opts.period) + const now = new Date() + const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate()) + const yesterdayStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - 1)) + const isAllProviders = pf === 'all' + + const cache = await hydrateCache() + + // CURRENT PERIOD DATA + // - .all provider: assemble from cache + today (fast) + // - specific provider: parse the period range with provider filter (correct, but slower) + let currentData: PeriodData + let scanProjects: ProjectSummary[] + let scanRange: DateRange + + if (isAllProviders) { + // Parse only today's sessions; historical data comes from cache to avoid double-counting + const todayRange: DateRange = { start: todayStart, end: new Date() } + const todayProjects = fp(await parseAllSessions(todayRange, 'all')) + const todayDays = aggregateProjectsIntoDays(todayProjects) + const rangeStartStr = 
toDateString(periodInfo.range.start) + const rangeEndStr = toDateString(periodInfo.range.end) + const historicalDays = getDaysInRange(cache, rangeStartStr, yesterdayStr) + const todayInRange = todayDays.filter(d => d.date >= rangeStartStr && d.date <= rangeEndStr) + const allDays = [...historicalDays, ...todayInRange].sort((a, b) => a.date.localeCompare(b.date)) + currentData = buildPeriodDataFromDays(allDays, periodInfo.label) + scanProjects = todayProjects + scanRange = periodInfo.range + } else { + const projects = fp(await parseAllSessions(periodInfo.range, pf)) + currentData = buildPeriodData(periodInfo.label, projects) + scanProjects = projects + scanRange = periodInfo.range + } + + // PROVIDERS + // For .all: enumerate every provider with cost across the period (from cache) + installed-but-zero. + // For specific: just this single provider with its scoped cost. + const allProviders = await getAllProviders() + const displayNameByName = new Map(allProviders.map(p => [p.name, p.displayName])) + const providers: ProviderCost[] = [] + if (isAllProviders) { + // Parse only today; historical provider costs come from cache + const todayRangeForProviders: DateRange = { start: todayStart, end: new Date() } + const todayDaysForProviders = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForProviders, 'all'))) + const rangeStartStr = toDateString(periodInfo.range.start) + const todayStr = toDateString(todayStart) + const allDaysForProviders = [ + ...getDaysInRange(cache, rangeStartStr, yesterdayStr), + ...todayDaysForProviders.filter(d => d.date === todayStr), + ] + const providerTotals: Record = {} + for (const d of allDaysForProviders) { + for (const [name, p] of Object.entries(d.providers)) { + providerTotals[name] = (providerTotals[name] ?? 0) + p.cost + } + } + for (const [name, cost] of Object.entries(providerTotals)) { + providers.push({ name: displayNameByName.get(name) ?? 
name, cost }) + } + for (const p of allProviders) { + if (providers.some(pc => pc.name === p.displayName)) continue + const sources = await p.discoverSessions() + if (sources.length > 0) providers.push({ name: p.displayName, cost: 0 }) + } + } else { + const display = displayNameByName.get(pf) ?? pf + providers.push({ name: display, cost: currentData.cost }) + } + + // DAILY HISTORY (last 365 days) + // Cache stores per-provider cost+calls per day in DailyEntry.providers, so we can derive + // a provider-filtered history without re-parsing. Tokens aren't broken down per provider + // in the cache, so the filtered view shows zero tokens (heatmap/trend still works on cost). + const historyStartStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - BACKFILL_DAYS)) + const allCacheDays = getDaysInRange(cache, historyStartStr, yesterdayStr) + // Parse only today for history; historical days come from cache + const todayRangeForHistory: DateRange = { start: todayStart, end: new Date() } + const allTodayDaysForHistory = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForHistory, 'all'))) + const todayStrForHistory = toDateString(todayStart) + const fullHistory = [...allCacheDays, ...allTodayDaysForHistory.filter(d => d.date === todayStrForHistory)] + const dailyHistory = fullHistory.map(d => { + if (isAllProviders) { + const topModels = Object.entries(d.models) + .filter(([name]) => name !== '') + .sort(([, a], [, b]) => b.cost - a.cost) + .slice(0, 5) + .map(([name, m]) => ({ + name, + cost: m.cost, + calls: m.calls, + inputTokens: m.inputTokens, + outputTokens: m.outputTokens, + })) + return { + date: d.date, + cost: d.cost, + calls: d.calls, + inputTokens: d.inputTokens, + outputTokens: d.outputTokens, + cacheReadTokens: d.cacheReadTokens, + cacheWriteTokens: d.cacheWriteTokens, + topModels, + } + } + const prov = d.providers[pf] ?? 
{ calls: 0, cost: 0 } + return { + date: d.date, + cost: prov.cost, + calls: prov.calls, + inputTokens: 0, + outputTokens: 0, + cacheReadTokens: 0, + cacheWriteTokens: 0, + topModels: [], + } + }) + + const optimize = opts.optimize === false ? null : await scanAndDetect(scanProjects, scanRange) + console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory))) + return + } + + if (opts.format === 'json') { + await hydrateCache() + const todayData = buildPeriodData('today', fp(await parseAllSessions(getDateRange('today').range, pf))) + const monthData = buildPeriodData('month', fp(await parseAllSessions(getDateRange('month').range, pf))) + const { code, rate } = getCurrency() + const payload: { + currency: string + today: { cost: number; calls: number } + month: { cost: number; calls: number } + plan?: JsonPlanSummary + } = { + currency: code, + today: { cost: Math.round(todayData.cost * rate * 100) / 100, calls: todayData.calls }, + month: { cost: Math.round(monthData.cost * rate * 100) / 100, calls: monthData.calls }, + } + const planUsage = await getPlanUsageOrNull() + if (planUsage) { + payload.plan = toJsonPlanSummary(planUsage) + } + console.log(JSON.stringify(payload)) + return + } + + await hydrateCache() + const monthProjects = fp(await parseAllSessions(getDateRange('month').range, pf)) + console.log(renderStatusBar(monthProjects)) + }) + +program + .command('today') + .description('Today\'s usage dashboard') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'today') + if (opts.format === 'json') { + await runJsonReport('today', opts.provider, opts.project, opts.exclude) + return + } + await hydrateCache() + await renderDashboard('today', opts.provider, opts.refresh, opts.project, opts.exclude) + }) + +program + .command('month') + .description('This month\'s usage dashboard') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'month') + if (opts.format === 'json') { + await runJsonReport('month', opts.provider, opts.project, opts.exclude) + return + } + await hydrateCache() + await renderDashboard('month', opts.provider, opts.refresh, opts.project, opts.exclude) + }) + +program + .command('export') + .description('Export usage data to CSV or JSON') + .option('-f, --format ', 'Export format: csv, json', 'csv') + .option('-o, --output ', 'Output file path') + .option('--from ', 'Start date (YYYY-MM-DD). Exports a single custom period when set') + .option('--to ', 'End date (YYYY-MM-DD). Exports a single custom period when set') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .action(async (opts) => { + assertFormat(opts.format, ['csv', 'json'], 'export') + await loadPricing() + await hydrateCache() + const pf = opts.provider + const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) + let customRange: DateRange | null = null + try { + customRange = parseDateRangeFlags(opts.from, opts.to) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Error: ${message}\n`) + process.exit(1) + } + + const periods: PeriodExport[] = customRange + ? [{ label: formatDateRangeLabel(opts.from, opts.to), projects: fp(await parseAllSessions(customRange, pf)) }] + : [ + { label: 'Today', projects: fp(await parseAllSessions(getDateRange('today').range, pf)) }, + { label: '7 Days', projects: fp(await parseAllSessions(getDateRange('week').range, pf)) }, + { label: '30 Days', projects: fp(await parseAllSessions(getDateRange('30days').range, pf)) }, + ] + + if (periods.every(p => p.projects.length === 0)) { + console.log('\n No usage data found.\n') + return + } + + const defaultName = `codeburn-${toDateString(new Date())}` + const outputPath = opts.output ?? `${defaultName}.${opts.format}` + + let savedPath: string + try { + if (opts.format === 'json') { + savedPath = await exportJson(periods, outputPath) + } else { + savedPath = await exportCsv(periods, outputPath) + } + } catch (err) { + // Protection guards in export.ts (symlink refusal, non-codeburn folder refusal, etc.) + // throw with a user-readable message. Print just the message, not the stack, so the CLI + // doesn't spray its internals at the user. + const message = err instanceof Error ? 
err.message : String(err) + console.error(`\n Export failed: ${message}\n`) + process.exit(1) + } + + const exportedLabel = customRange ? formatDateRangeLabel(opts.from, opts.to) : 'Today + 7 Days + 30 Days' + console.log(`\n Exported (${exportedLabel}) to: ${savedPath}\n`) + }) + +program + .command('menubar') + .description('Install and launch the macOS menubar app (one command, no clone)') + .option('--force', 'Reinstall even if an older copy is already in ~/Applications') + .action(async (opts: { force?: boolean }) => { + try { + const result = await installMenubarApp({ force: opts.force }) + console.log(`\n Ready. ${result.installedPath}\n`) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Menubar install failed: ${message}\n`) + process.exit(1) + } + }) + +program + .command('currency [code]') + .description('Set display currency (e.g. codeburn currency GBP)') + .option('--symbol ', 'Override the currency symbol') + .option('--reset', 'Reset to USD (removes currency config)') + .action(async (code?: string, opts?: { symbol?: string; reset?: boolean }) => { + if (opts?.reset) { + const config = await readConfig() + delete config.currency + await saveConfig(config) + console.log('\n Currency reset to USD.\n') + return + } + + if (!code) { + const { code: activeCode, rate, symbol } = getCurrency() + if (activeCode === 'USD' && rate === 1) { + console.log('\n Currency: USD (default)') + console.log(` Config: ${getConfigFilePath()}\n`) + } else { + console.log(`\n Currency: ${activeCode}`) + console.log(` Symbol: ${symbol}`) + console.log(` Rate: 1 USD = ${rate} ${activeCode}`) + console.log(` Config: ${getConfigFilePath()}\n`) + } + return + } + + const upperCode = code.toUpperCase() + if (!isValidCurrencyCode(upperCode)) { + console.error(`\n "${code}" is not a valid ISO 4217 currency code.\n`) + process.exitCode = 1 + return + } + + const config = await readConfig() + config.currency = { + code: upperCode, 
+ ...(opts?.symbol ? { symbol: opts.symbol } : {}), + } + await saveConfig(config) + + await loadCurrency() + const { rate, symbol } = getCurrency() + + console.log(`\n Currency set to ${upperCode}.`) + console.log(` Symbol: ${symbol}`) + console.log(` Rate: 1 USD = ${rate} ${upperCode}`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + }) + +program + .command('model-alias [from] [to]') + .description('Map a provider model name to a canonical one for pricing (e.g. codeburn model-alias my-model claude-opus-4-6)') + .option('--remove ', 'Remove an alias') + .option('--list', 'List configured aliases') + .action(async (from?: string, to?: string, opts?: { remove?: string; list?: boolean }) => { + const config = await readConfig() + const aliases = config.modelAliases ?? {} + + if (opts?.list || (!from && !opts?.remove)) { + const entries = Object.entries(aliases) + if (entries.length === 0) { + console.log('\n No model aliases configured.') + console.log(` Config: ${getConfigFilePath()}\n`) + } else { + console.log('\n Model aliases:') + for (const [src, dst] of entries) { + console.log(` ${src} -> ${dst}`) + } + console.log(` Config: ${getConfigFilePath()}\n`) + } + return + } + + if (opts?.remove) { + if (!(opts.remove in aliases)) { + console.error(`\n Alias not found: ${opts.remove}\n`) + process.exitCode = 1 + return + } + delete aliases[opts.remove] + config.modelAliases = Object.keys(aliases).length > 0 ? 
aliases : undefined + await saveConfig(config) + console.log(`\n Removed alias: ${opts.remove}\n`) + return + } + + if (!from || !to) { + console.error('\n Usage: codeburn model-alias \n') + process.exitCode = 1 + return + } + + aliases[from] = to + config.modelAliases = aliases + await saveConfig(config) + console.log(`\n Alias saved: ${from} -> ${to}`) + console.log(` Config: ${getConfigFilePath()}\n`) + }) + +program + .command('plan [action] [id]') + .description('Show or configure a subscription plan for overage tracking') + .option('--format ', 'Output format: text or json', 'text') + .option('--monthly-usd ', 'Monthly plan price in USD (for custom)', parseNumber) + .option('--provider ', 'Provider scope: all, claude, codex, cursor', 'all') + .option('--reset-day ', 'Day of month plan resets (1-28)', parseInteger, 1) + .action(async (action?: string, id?: string, opts?: { format?: string; monthlyUsd?: number; provider?: string; resetDay?: number }) => { + assertFormat(opts?.format ?? 'text', ['text', 'json'], 'plan') + const mode = action ?? 'show' + + if (mode === 'show') { + const plan = await readPlan() + const displayPlan = !plan || plan.id === 'none' + ? 
{ id: 'none', monthlyUsd: 0, provider: 'all', resetDay: 1, setAt: null } + : { + id: plan.id, + monthlyUsd: plan.monthlyUsd, + provider: plan.provider, + resetDay: clampResetDay(plan.resetDay), + setAt: plan.setAt, + } + if (opts?.format === 'json') { + console.log(JSON.stringify(displayPlan)) + return + } + if (!plan || plan.id === 'none') { + console.log('\n Plan: none') + console.log(' API-pricing view is active.') + console.log(` Config: ${getConfigFilePath()}\n`) + return + } + console.log(`\n Plan: ${planDisplayName(plan.id)} (${plan.id})`) + console.log(` Budget: $${plan.monthlyUsd}/month`) + console.log(` Provider: ${plan.provider}`) + console.log(` Reset day: ${clampResetDay(plan.resetDay)}`) + console.log(` Set at: ${plan.setAt}`) + console.log(` Config: ${getConfigFilePath()}\n`) + return + } + + if (mode === 'reset') { + await clearPlan() + console.log('\n Plan reset. API-pricing view is active.\n') + return + } + + if (mode !== 'set') { + console.error('\n Usage: codeburn plan [set | reset]\n') + process.exitCode = 1 + return + } + + if (!id || !isPlanId(id)) { + console.error(`\n Plan id must be one of: claude-pro, claude-max, cursor-pro, custom, none; got "${id ?? ''}".\n`) + process.exitCode = 1 + return + } + + const resetDay = opts?.resetDay ?? 1 + if (!Number.isInteger(resetDay) || resetDay < 1 || resetDay > 28) { + console.error(`\n --reset-day must be an integer from 1 to 28; got ${resetDay}.\n`) + process.exitCode = 1 + return + } + + if (id === 'none') { + await clearPlan() + console.log('\n Plan reset. 
API-pricing view is active.\n') + return + } + + if (id === 'custom') { + if (opts?.monthlyUsd === undefined) { + console.error('\n Custom plans require --monthly-usd .\n') + process.exitCode = 1 + return + } + const monthlyUsd = opts.monthlyUsd + if (!Number.isFinite(monthlyUsd) || monthlyUsd <= 0) { + console.error(`\n --monthly-usd must be a positive number; got ${opts.monthlyUsd}.\n`) + process.exitCode = 1 + return + } + const provider = opts?.provider ?? 'all' + if (!isPlanProvider(provider)) { + console.error(`\n --provider must be one of: all, claude, codex, cursor; got "${provider}".\n`) + process.exitCode = 1 + return + } + await savePlan({ + id: 'custom', + monthlyUsd, + provider, + resetDay, + setAt: new Date().toISOString(), + }) + console.log(`\n Plan set to custom ($${monthlyUsd}/month, ${provider}, reset day ${resetDay}).`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + return + } + + const preset = getPresetPlan(id) + if (!preset) { + console.error(`\n Unknown preset "${id}".\n`) + process.exitCode = 1 + return + } + + await savePlan({ + ...preset, + resetDay, + setAt: new Date().toISOString(), + }) + console.log(`\n Plan set to ${planDisplayName(preset.id)} ($${preset.monthlyUsd}/month).`) + console.log(` Provider: ${preset.provider}`) + console.log(` Reset day: ${resetDay}`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + }) + +program + .command('optimize') + .description('Find token waste and get exact fixes') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .action(async (opts) => { + await loadPricing() + await hydrateCache() + const { range, label } = getDateRange(opts.period) + const projects = await parseAllSessions(range, opts.provider) + await runOptimize(projects, label, range) + }) + +program + .command('compare') + .description('Compare two AI models side-by-side') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'all') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .action(async (opts) => { + await loadPricing() + await hydrateCache() + const { range } = getDateRange(opts.period) + await renderCompare(range, opts.provider) + }) + +program + .command('models') + .description('Per-model token + cost table, optionally exploded by task type') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') + .option('--from ', 'Custom range start (YYYY-MM-DD)') + .option('--to ', 'Custom range end (YYYY-MM-DD)') + .option('--provider ', 'Filter by provider (e.g. claude, codex, cursor)', 'all') + .option('--task ', 'Filter to one task type (e.g. 
feature, debugging, refactoring)') + .option('--by-task', 'One row per (provider, model, task) instead of one row per (provider, model)') + .option('--top ', 'Show only the top N rows', (v: string) => parseInt(v, 10)) + .option('--min-cost ', 'Hide rows below this cost threshold', (v: string) => parseFloat(v)) + .option('--no-totals', 'Suppress the footer totals row') + .option('--format ', 'Output format: table, markdown, json, csv', 'table') + .action(async (opts) => { + const { aggregateModels, renderTable, renderMarkdown, renderJson, renderCsv } = await import('./models-report.js') + await loadPricing() + await hydrateCache() + + let range + if (opts.from || opts.to) { + const customRange = parseDateRangeFlags(opts.from, opts.to) + if (!customRange) { + process.stderr.write('codeburn: --from and --to must be valid YYYY-MM-DD dates\n') + process.exit(1) + } + range = customRange + } else { + range = getDateRange(opts.period).range + } + + const projects = await parseAllSessions(range, opts.provider) + const rows = await aggregateModels(projects, { + byTask: !!opts.byTask, + taskFilter: opts.task, + topN: typeof opts.top === 'number' && Number.isFinite(opts.top) ? opts.top : undefined, + minCost: typeof opts.minCost === 'number' && Number.isFinite(opts.minCost) ? opts.minCost : 0.01, + }) + + const fmt = (opts.format ?? 
'table').toLowerCase() + if (rows.length === 0 && (fmt === 'table' || fmt === 'markdown')) { + process.stdout.write('No model usage found for the selected period.\n') + return + } + if (fmt === 'json') { + process.stdout.write(renderJson(rows) + '\n') + } else if (fmt === 'csv') { + process.stdout.write(renderCsv(rows, { byTask: !!opts.byTask }) + '\n') + } else if (fmt === 'markdown' || fmt === 'md') { + process.stdout.write(renderMarkdown(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') + } else if (fmt === 'table') { + process.stdout.write(renderTable(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') + } else { + process.stderr.write(`codeburn: unknown --format "${opts.format}". Choose table, markdown, json, or csv.\n`) + process.exit(1) + } + }) + +program + .command('yield') + .description('Track which AI spend shipped to main vs reverted/abandoned (experimental)') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'week') + .action(async (opts) => { + const { computeYield, formatYieldSummary } = await import('./yield.js') + await loadPricing() + await hydrateCache() + const { range, label } = getDateRange(opts.period) + console.log(`\n Analyzing yield for ${label}...\n`) + const summary = await computeYield(range, process.cwd()) + console.log(formatYieldSummary(summary)) + }) + +program.parse() diff --git a/tsup.config.ts b/tsup.config.ts index 2ba26c58..957fdce8 100644 --- a/tsup.config.ts +++ b/tsup.config.ts @@ -1,7 +1,7 @@ import { defineConfig } from 'tsup' export default defineConfig({ - entry: ['src/cli.ts'], + entry: ['src/main.ts'], format: ['esm'], target: 'node20', outDir: 'dist', @@ -9,7 +9,4 @@ export default defineConfig({ splitting: false, sourcemap: true, dts: false, - banner: { - js: '#!/usr/bin/env node', - }, }) From 3b71650f243d4ccc80c7f60e07f10098f99d0e45 Mon Sep 17 00:00:00 2001 From: Resham Joshi <65915470+iamtoruk@users.noreply.github.com> Date: Mon, 11 
May 2026 22:02:38 -0700 Subject: [PATCH 11/17] Fix mangled project paths in dashboard (#320) * Fix mangled project paths in By Project and Top Sessions panels shortProject() decoded Claude Code slugs by splitting on '-', which broke directory names containing dashes ('foo-bar' became 'foo/bar'). Switch the dashboard to consume ProjectSummary.projectPath (the canonical cwd already extracted by parser.ts) and rewrite shortProject to operate on a real absolute path. * shortProject: cache homedir, normalize Windows backslashes, fix stale test helper --------- Co-authored-by: Abdallah Meghraoui --- src/dashboard.tsx | 27 +++++++++++++++------------ tests/dashboard.test.ts | 35 ++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 13 deletions(-) diff --git a/src/dashboard.tsx b/src/dashboard.tsx index e666b183..0d84fbd9 100644 --- a/src/dashboard.tsx +++ b/src/dashboard.tsx @@ -248,16 +248,19 @@ function DailyActivity({ projects, days = 14, pw, bw }: { projects: ProjectSumma ) } -const _homeEncoded = homedir().replace(/\//g, '-') - -function shortProject(encoded: string): string { - let path = encoded.replace(/^-/, '') - if (path.startsWith(_homeEncoded.replace(/^-/, ''))) { - path = path.slice(_homeEncoded.replace(/^-/, '').length).replace(/^-/, '') - } - path = path.replace(/^private-tmp-[^-]+-[^-]+-/, '').replace(/^private-tmp-/, '').replace(/^tmp-/, '') +const _home = homedir() +const _homePrefix = _home.endsWith('/') ? 
_home : _home + '/' + +export function shortProject(absPath: string): string { + const normalized = absPath.replace(/\\/g, '/') + let path: string + if (normalized === _home) path = '' + else if (normalized.startsWith(_homePrefix)) path = normalized.slice(_homePrefix.length) + else path = normalized + path = path.replace(/^\/+/, '') + path = path.replace(/^private\/tmp\/[^/]+\/[^/]+\//, '').replace(/^private\/tmp\//, '').replace(/^tmp\//, '') if (!path) return 'home' - const parts = path.split('-').filter(Boolean) + const parts = path.split('/').filter(Boolean) if (parts.length <= 3) return parts.join('/') return parts.slice(-3).join('/') } @@ -283,7 +286,7 @@ function ProjectBreakdown({ projects, pw, bw, budgets }: { projects: ProjectSumm return ( - {fit(shortProject(project.project), nw)} + {fit(shortProject(project.projectPath), nw)} {formatCost(project.totalCostUSD).padStart(8)} {avgCost.padStart(PROJECT_COL_AVG)} {String(project.sessions.length).padStart(6)} @@ -443,7 +446,7 @@ const TOP_SESSIONS_CALLS_COL = 6 function TopSessions({ projects, pw, bw }: { projects: ProjectSummary[]; pw: number; bw: number }) { const allSessions = projects.flatMap(p => - p.sessions.map(s => ({ ...s, projectName: p.project })) + p.sessions.map(s => ({ ...s, projectPath: p.projectPath })) ) const top = [...allSessions].sort((a, b) => b.totalCostUSD - a.totalCostUSD).slice(0, 5) @@ -461,7 +464,7 @@ function TopSessions({ projects, pw, bw }: { projects: ProjectSummary[]; pw: num const date = session.firstTimestamp ? 
session.firstTimestamp.slice(0, TOP_SESSIONS_DATE_LEN) : '----------' - const label = `${date} ${shortProject(session.projectName)}` + const label = `${date} ${shortProject(session.projectPath)}` return ( diff --git a/tests/dashboard.test.ts b/tests/dashboard.test.ts index 0d36e2ed..da802f11 100644 --- a/tests/dashboard.test.ts +++ b/tests/dashboard.test.ts @@ -1,5 +1,8 @@ +import { homedir } from 'os' + import { describe, it, expect } from 'vitest' +import { shortProject } from '../src/dashboard.js' import { formatCost } from '../src/format.js' import type { ProjectSummary, SessionSummary } from '../src/types.js' @@ -53,7 +56,7 @@ function makeProject(name: string, sessions: SessionSummary[]): ProjectSummary { // Logic replicated from TopSessions component function getTopSessions(projects: ProjectSummary[], n = 5) { - const all = projects.flatMap(p => p.sessions.map(s => ({ ...s, projectName: p.project }))) + const all = projects.flatMap(p => p.sessions.map(s => ({ ...s, projectPath: p.projectPath }))) return [...all].sort((a, b) => b.totalCostUSD - a.totalCostUSD).slice(0, n) } @@ -99,6 +102,36 @@ describe('TopSessions - top-5 selection', () => { }) }) +describe('shortProject - path shortening', () => { + const home = homedir() + + it('preserves directory names containing dashes', () => { + expect(shortProject(`${home}/work/my-project`)).toBe('work/my-project') + }) + + it('preserves directory names containing dots', () => { + expect(shortProject(`${home}/work/my.app.io`)).toBe('work/my.app.io') + }) + + it('returns "home" for the home dir itself', () => { + expect(shortProject(home)).toBe('home') + }) + + it('does not strip a sibling whose name shares the home prefix', () => { + const sibling = `${home}-backup/proj` + expect(shortProject(sibling).endsWith('proj')).toBe(true) + expect(shortProject(sibling)).not.toMatch(/^-/) + }) + + it('keeps only the last 3 segments for deeply nested paths', () => { + expect(shortProject(`${home}/a/b/c/d/e/f`)).toBe('d/e/f') + 
}) + + it('handles paths outside the home dir', () => { + expect(shortProject('/opt/myproject')).toBe('opt/myproject') + }) +}) + describe('avg/s in ProjectBreakdown', () => { it('returns dash for a project with no sessions', () => { const project = makeProject('proj', []) From fe2e622038035b429c08fde6c969a49101e91521 Mon Sep 17 00:00:00 2001 From: Resham Joshi <65915470+iamtoruk@users.noreply.github.com> Date: Mon, 11 May 2026 22:16:00 -0700 Subject: [PATCH 12/17] Skip Cursor bubble rows that lack a createdAt timestamp (#321) Bubble rows without createdAt were defaulting to new Date(), which misattributed historical or undated usage to Today and inflated the daily chart. Now filtered at the SQL level and skipped in application code. Based on the bubble-side fix from #262 by @darthrevanyunka. --- src/providers/cursor.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/providers/cursor.ts b/src/providers/cursor.ts index 63625126..ebd7f918 100644 --- a/src/providers/cursor.ts +++ b/src/providers/cursor.ts @@ -329,7 +329,8 @@ const USER_MESSAGES_QUERY = ` // the whole template. The original combined string is preserved as // BUBBLE_QUERY_SINCE for any caller that doesn't want the cap. const BUBBLE_QUERY_SINCE_HEAD = BUBBLE_QUERY_BASE + ` - AND (json_extract(value, '$.createdAt') > ? OR json_extract(value, '$.createdAt') IS NULL)` + AND json_extract(value, '$.createdAt') IS NOT NULL + AND json_extract(value, '$.createdAt') > ?` const BUBBLE_QUERY_SINCE_TAIL = ` ORDER BY ROWID ASC ` @@ -458,6 +459,7 @@ function parseBubbles(db: SqliteDatabase, seenKeys: Set): { calls: Parse } const createdAt = row.created_at ?? '' + if (!createdAt) continue // The JSON `conversationId` field on bubbles is empty in current // Cursor builds. The real composerId lives in the row key // `bubbleId::`. 
Extract from the key so the @@ -487,7 +489,7 @@ function parseBubbles(db: SqliteDatabase, seenKeys: Set): { calls: Parse const costUSD = calculateCost(pricingModel, inputTokens, outputTokens, 0, 0, 0) - const timestamp = createdAt || new Date().toISOString() + const timestamp = createdAt const userQuestion = takeUserMessage(userMessages, conversationId) const assistantText = blobToText(row.user_text) const userText = (userQuestion + ' ' + assistantText).trim() From f9a5d2c8e6ddfd995ed6063e6bf1ea5e8b6923f6 Mon Sep 17 00:00:00 2001 From: iamtoruk Date: Mon, 11 May 2026 22:19:15 -0700 Subject: [PATCH 13/17] Add changelog entries for project path fix and Cursor undated bubbles --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f706161..2bf6977d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,13 @@ `_` names, which the shared MCP pipeline did not recognize. The provider now normalizes these to the canonical `mcp____` form so MCP breakdowns and `optimize` work correctly. Closes #308. +- **Mangled project names in dashboard.** The By Project and Top Sessions + panels decoded slugs by splitting on `-`, which broke directory names + containing dashes or dots (e.g. `my-project` rendered as `my/project`). + Now uses the real project path instead. Closes #196. +- **Cursor undated bubble rows misattributed to Today.** Bubble rows without + a `createdAt` timestamp were defaulting to the current date, inflating + Today's spend. Now skipped at both the SQL and application level. ## 0.9.8 - 2026-05-10 From 151d24fb267b6855453a2add8ed06817a3a8f469 Mon Sep 17 00:00:00 2001 From: Resham Joshi <65915470+iamtoruk@users.noreply.github.com> Date: Mon, 11 May 2026 22:25:32 -0700 Subject: [PATCH 14/17] Drop Z suffix from day-aggregator test timestamps for timezone stability (#322) Timestamps with Z are interpreted as UTC, causing date bucketing tests to fail in non-UTC timezones (e.g. UTC+12 shifts Apr 9 10:00Z to Apr 8). 
Local timestamps without Z are interpreted in the runtime timezone, matching how the aggregator actually buckets dates. Based on #112 by @lfl1337, extended to cover all affected timestamps. --- tests/day-aggregator.test.ts | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/day-aggregator.test.ts b/tests/day-aggregator.test.ts index 9ca92390..c58937bb 100644 --- a/tests/day-aggregator.test.ts +++ b/tests/day-aggregator.test.ts @@ -46,8 +46,8 @@ describe('aggregateProjectsIntoDays', () => { sessions: [{ sessionId: 's1', project: 'p', - firstTimestamp: '2026-04-09T10:00:00Z', - lastTimestamp: '2026-04-10T08:00:00Z', + firstTimestamp: '2026-04-09T10:00:00', + lastTimestamp: '2026-04-10T08:00:00', totalCostUSD: 10, totalInputTokens: 0, totalOutputTokens: 0, @@ -57,14 +57,14 @@ describe('aggregateProjectsIntoDays', () => { turns: [ { userMessage: 'hi', - timestamp: '2026-04-09T10:00:00Z', + timestamp: '2026-04-09T10:00:00', sessionId: 's1', category: 'coding', retries: 0, hasEdits: true, assistantCalls: [ - makeCall('2026-04-09T10:00:00Z', 4), - makeCall('2026-04-10T08:00:00Z', 6), + makeCall('2026-04-09T10:00:00', 4), + makeCall('2026-04-10T08:00:00', 6), ], }, ], @@ -92,8 +92,8 @@ describe('aggregateProjectsIntoDays', () => { sessions: [{ sessionId: 's1', project: 'p', - firstTimestamp: '2026-04-09T10:00:00Z', - lastTimestamp: '2026-04-09T10:05:00Z', + firstTimestamp: '2026-04-09T10:00:00', + lastTimestamp: '2026-04-09T10:05:00', totalCostUSD: 3, totalInputTokens: 0, totalOutputTokens: 0, @@ -103,12 +103,12 @@ describe('aggregateProjectsIntoDays', () => { turns: [ { userMessage: 'hi', - timestamp: '2026-04-09T10:00:00Z', + timestamp: '2026-04-09T10:00:00', sessionId: 's1', category: 'coding', retries: 0, hasEdits: true, - assistantCalls: [makeCall('2026-04-09T10:00:00Z', 3)], + assistantCalls: [makeCall('2026-04-09T10:00:00', 3)], }, ], modelBreakdown: {}, @@ -138,8 +138,8 @@ 
describe('aggregateProjectsIntoDays', () => { sessions: [{ sessionId: 's1', project: 'p', - firstTimestamp: '2026-04-09T23:59:00Z', - lastTimestamp: '2026-04-10T00:10:00Z', + firstTimestamp: '2026-04-09T23:59:00', + lastTimestamp: '2026-04-10T00:10:00', totalCostUSD: 1, totalInputTokens: 0, totalOutputTokens: 0, totalCacheReadTokens: 0, totalCacheWriteTokens: 0, apiCalls: 0, @@ -151,7 +151,7 @@ describe('aggregateProjectsIntoDays', () => { }), ] const days = aggregateProjectsIntoDays(projects) - const expectedDate = dateKey('2026-04-09T23:59:00Z') + const expectedDate = dateKey('2026-04-09T23:59:00') expect(days[0]!.date).toBe(expectedDate) expect(days[0]!.sessions).toBe(1) }) @@ -162,18 +162,18 @@ describe('aggregateProjectsIntoDays', () => { sessions: [{ sessionId: 's1', project: 'p', - firstTimestamp: '2026-04-10T10:00:00Z', - lastTimestamp: '2026-04-10T10:00:00Z', + firstTimestamp: '2026-04-10T10:00:00', + lastTimestamp: '2026-04-10T10:00:00', totalCostUSD: 10, totalInputTokens: 0, totalOutputTokens: 0, totalCacheReadTokens: 0, totalCacheWriteTokens: 0, apiCalls: 2, turns: [ { - userMessage: 'x', timestamp: '2026-04-10T10:00:00Z', sessionId: 's1', + userMessage: 'x', timestamp: '2026-04-10T10:00:00', sessionId: 's1', category: 'coding', retries: 0, hasEdits: false, assistantCalls: [ - makeCall('2026-04-10T10:00:00Z', 7, 'Opus 4.7', 'claude'), - makeCall('2026-04-10T10:00:00Z', 3, 'gpt-5', 'codex'), + makeCall('2026-04-10T10:00:00', 7, 'Opus 4.7', 'claude'), + makeCall('2026-04-10T10:00:00', 3, 'gpt-5', 'codex'), ], }, ], From ab87b61bbab55836d01a2186a3aed573b1bc19cb Mon Sep 17 00:00:00 2001 From: Rashid Razak Date: Wed, 13 May 2026 12:18:17 +0800 Subject: [PATCH 15/17] Fix menubar showing empty data after reboot when CLI is installed via fnm/nvm Login-item launches don't source .zshrc, leaving version-manager bin directories (fnm, nvm, volta, asdf) absent from PATH. 
The menubar's augmentedPath only covered /opt/homebrew/bin and /usr/local/bin, so codeburn was never found after a cold reboot. - Add discoverNodeManagerBinDirs() that dynamically scans for fnm, nvm, volta, and asdf installations and adds the latest Node version's bin directory to PATH - Add PATH logging to DataClient spawn error for easier future diagnosis - Log the swallowed error in hydrateCache() catch block so silent cache-empty failures are visible in stderr - Add scripts/diagnose-menubar-cli.sh for testing restricted-PATH CLI execution without rebuilding the menubar app --- .../CodeBurnMenubar/Data/DataClient.swift | 2 + .../Security/CodeburnCLI.swift | 58 +++++++++++++++ scripts/diagnose-menubar-cli.sh | 73 +++++++++++++++++++ src/cli.ts | 5 +- 4 files changed, 137 insertions(+), 1 deletion(-) create mode 100755 scripts/diagnose-menubar-cli.sh diff --git a/mac/Sources/CodeBurnMenubar/Data/DataClient.swift b/mac/Sources/CodeBurnMenubar/Data/DataClient.swift index 4b0083c0..c3bcdb44 100644 --- a/mac/Sources/CodeBurnMenubar/Data/DataClient.swift +++ b/mac/Sources/CodeBurnMenubar/Data/DataClient.swift @@ -58,6 +58,8 @@ struct DataClient { do { try process.run() } catch { + let path = ProcessInfo.processInfo.environment["PATH"] ?? "(no PATH)" + NSLog("CodeBurn: CLI spawn failed. 
PATH=%@ error=%@", path, error.localizedDescription) throw DataClientError.spawn(error.localizedDescription) } diff --git a/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift b/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift index 4f4a5f82..88a4e3ca 100644 --- a/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift +++ b/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift @@ -59,6 +59,64 @@ enum CodeburnCLI { for extra in additionalPathEntries where !parts.contains(extra) { parts.append(extra) } + for dir in discoverNodeManagerBinDirs() where !parts.contains(dir) { + parts.append(dir) + } return parts.joined(separator: ":") } + + /// Login-item launches don't source .zshrc, so nvm / fnm / volta / asdf bin + /// directories are absent from PATH. Scan common version-manager locations + /// and add the latest Node version's bin dir so `codeburn` can be found. + private static func discoverNodeManagerBinDirs() -> [String] { + let home = FileManager.default.homeDirectoryForCurrentUser.path + let fm = FileManager.default + + // fnm: ~/.local/share/fnm/node-versions//installation/bin + let fnmVersionsDir = "\(home)/.local/share/fnm/node-versions" + if let latest = latestVersionDir(in: fnmVersionsDir) { + let binDir = "\(fnmVersionsDir)/\(latest)/installation/bin" + if fm.fileExists(atPath: "\(binDir)/node") { + return [binDir] + } + } + + // nvm: ~/.nvm/versions/node//bin + let nvmVersionsDir = "\(home)/.nvm/versions/node" + if let latest = latestVersionDir(in: nvmVersionsDir) { + let binDir = "\(nvmVersionsDir)/\(latest)/bin" + if fm.fileExists(atPath: "\(binDir)/node") { + return [binDir] + } + } + + // volta: ~/.volta/bin (flat, no version dirs) + let voltaBin = "\(home)/.volta/bin" + if fm.fileExists(atPath: "\(voltaBin)/node") { + return [voltaBin] + } + + // asdf: ~/.asdf/shims (flat shim dir) + let asdfShims = "\(home)/.asdf/shims" + if fm.fileExists(atPath: "\(asdfShims)/node") { + return [asdfShims] + } + + return [] + } + + /// Returns the latest 
version directory name (e.g. "v22.15.0") from a + /// parent directory containing version-named subdirectories. + private static func latestVersionDir(in parent: String) -> String? { + let fm = FileManager.default + var isDir: ObjCBool = false + guard fm.fileExists(atPath: parent, isDirectory: &isDir), isDir.boolValue, + let entries = try? fm.contentsOfDirectory(atPath: parent) else { + return nil + } + return entries + .filter { $0.hasPrefix("v") } + .sorted() + .last + } } diff --git a/scripts/diagnose-menubar-cli.sh b/scripts/diagnose-menubar-cli.sh new file mode 100755 index 00000000..8e171e6d --- /dev/null +++ b/scripts/diagnose-menubar-cli.sh @@ -0,0 +1,73 @@ +#!/bin/bash +# Replicates the menubar's restricted PATH environment to test if the CLI +# can find and run codeburn with the same PATH the menubar provides. +# +# The menubar augments PATH with: /opt/homebrew/bin /usr/local/bin +# The base PATH for a Login Item is typically: /usr/bin:/bin:/usr/sbin:/sbin + +set -euo pipefail + +RESTRICTED_PATH="/usr/bin:/bin:/usr/sbin:/sbin:/opt/homebrew/bin:/usr/local/bin" + +echo "=== Menubar PATH Diagnostic ===" +echo "" +echo "Using restricted PATH: $RESTRICTED_PATH" +echo "" + +# 1. Check if codeburn is found +echo "--- Step 1: Locate codeburn binary ---" +FOUND=$(PATH="$RESTRICTED_PATH" /usr/bin/env which codeburn 2>&1 || true) +if [ -z "$FOUND" ]; then + echo "FAIL: codeburn not found in restricted PATH" + echo "" + echo "Where codeburn actually is:" + /usr/bin/env which -a codeburn 2>/dev/null || echo "(not found anywhere)" + echo "" + echo "Fix: codeburn is installed outside the menubar's PATH. Options:" + echo " 1. Add the install directory to CodeburnCLI.additionalPathEntries" + echo " 2. Symlink codeburn into /usr/local/bin" + exit 1 +fi +echo "OK: codeburn found at: $FOUND" +echo "" + +# 2. 
Check if node is found (needed for codeburn shell wrapper) +echo "--- Step 2: Locate node binary ---" +NODE_FOUND=$(PATH="$RESTRICTED_PATH" /usr/bin/env which node 2>&1 || true) +if [ -z "$NODE_FOUND" ]; then + echo "WARNING: node not found in restricted PATH" + echo "This may cause codeburn to fail if it's a shell wrapper." + echo "" +else + echo "OK: node found at: $NODE_FOUND" + echo "Node version: $(PATH="$RESTRICTED_PATH" node --version 2>&1 || echo 'failed')" +fi +echo "" + +# 3. Run the command the menubar spawns +echo "--- Step 3: Run menubar-equivalent CLI command ---" +echo "Command: codeburn status --format menubar-json --period today --provider all" +echo "" + +STDERR_FILE=$(mktemp) +trap 'rm -f "$STDERR_FILE"' EXIT + +if PATH="$RESTRICTED_PATH" /usr/bin/env -- codeburn status --format menubar-json --period today --provider all 2>"$STDERR_FILE"; then + echo "" + if [ -s "$STDERR_FILE" ]; then + echo "Warnings/errors on stderr:" + cat "$STDERR_FILE" + fi + echo "" + echo "SUCCESS: CLI ran successfully with restricted PATH." +else + EXIT_CODE=$? + echo "" + echo "FAIL: CLI exited with code $EXIT_CODE" + if [ -s "$STDERR_FILE" ]; then + echo "" + echo "Stderr output:" + cat "$STDERR_FILE" + fi + exit 1 +fi diff --git a/src/cli.ts b/src/cli.ts index abc2b0bc..3efb62ce 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -32,7 +32,10 @@ async function hydrateCache() { (range) => parseAllSessions(range, 'all'), aggregateProjectsIntoDays, ) - } catch { + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + const stack = err instanceof Error && err.stack ? 
`\n${err.stack}` : '' + process.stderr.write(`codeburn: hydrateCache failed, returning empty cache: ${message}${stack}\n`) return emptyCache() } } From f663ec01fb98c799b97bb7bc233ce10ea85b65d2 Mon Sep 17 00:00:00 2001 From: Rashid Razak Date: Mon, 11 May 2026 19:06:08 +0800 Subject: [PATCH 16/17] Replace osascript/JXA with native Mach-O agent and SMAppService to fix EDR detection - Add CodeBurnRefreshAgent target: native fire-and-exit binary posting com.codeburn.refresh notification - Rewrite installLaunchAgentIfNeeded(): plist ProgramArguments points to native binary, not osascript/JXA - Rewrite registerLoginItemIfNeeded(): uses SMAppService API instead of osascript/System Events - Add startSocketListener(): Unix domain socket for CLI-triggered menubar refresh - Add src/menubar-socket.ts: CLI-side notifyMenubar() helper wired into status --format menubar-json - Update Package.swift with new product/target, package-app.sh copies agent into bundle Resources - Add tests: plist content verification, login item guard, agent smoke test --- mac/Package.swift | 7 +- mac/Scripts/package-app.sh | 1 + mac/Sources/CodeBurnMenubar/CodeBurnApp.swift | 80 +- mac/Sources/CodeBurnRefreshAgent/main.swift | 8 + .../EDRDetectionFixTests.swift | 137 +++ src/cli.ts | 993 +++++++++++++++++- src/main.ts | 2 + src/menubar-socket.ts | 12 + 8 files changed, 1204 insertions(+), 36 deletions(-) create mode 100644 mac/Sources/CodeBurnRefreshAgent/main.swift create mode 100644 mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift create mode 100644 src/menubar-socket.ts diff --git a/mac/Package.swift b/mac/Package.swift index 67509f27..ff8161e2 100644 --- a/mac/Package.swift +++ b/mac/Package.swift @@ -7,7 +7,8 @@ let package = Package( .macOS(.v14) ], products: [ - .executable(name: "CodeBurnMenubar", targets: ["CodeBurnMenubar"]) + .executable(name: "CodeBurnMenubar", targets: ["CodeBurnMenubar"]), + .executable(name: "CodeBurnRefreshAgent", targets: ["CodeBurnRefreshAgent"]) ], targets: 
[ .executableTarget( @@ -17,6 +18,10 @@ let package = Package( .enableUpcomingFeature("StrictConcurrency") ] ), + .executableTarget( + name: "CodeBurnRefreshAgent", + path: "Sources/CodeBurnRefreshAgent" + ), .testTarget( name: "CodeBurnMenubarTests", dependencies: ["CodeBurnMenubar"], diff --git a/mac/Scripts/package-app.sh b/mac/Scripts/package-app.sh index ee0dc065..5d064f53 100755 --- a/mac/Scripts/package-app.sh +++ b/mac/Scripts/package-app.sh @@ -43,6 +43,7 @@ BUNDLE="${DIST_DIR}/${BUNDLE_NAME}" mkdir -p "${BUNDLE}/Contents/MacOS" mkdir -p "${BUNDLE}/Contents/Resources" cp "${BUILT_BINARY}" "${BUNDLE}/Contents/MacOS/${EXECUTABLE_NAME}" +cp "${BIN_PATH}/CodeBurnRefreshAgent" "${BUNDLE}/Contents/Resources/CodeBurnRefreshAgent" cat > "${BUNDLE}/Contents/Info.plist" < diff --git a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift index a58d0446..a72e5d3e 100644 --- a/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift +++ b/mac/Sources/CodeBurnMenubar/CodeBurnApp.swift @@ -1,6 +1,7 @@ import SwiftUI import AppKit import Observation +import ServiceManagement private let refreshIntervalSeconds: UInt64 = 30 private let nanosPerSecond: UInt64 = 1_000_000_000 @@ -79,6 +80,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { setupDistributedNotificationListener() installLaunchAgentIfNeeded() registerLoginItemIfNeeded() + startSocketListener() observeSubscriptionDisconnect() Task { await updateChecker.checkIfNeeded() } } @@ -161,6 +163,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { let agentName = "com.codeburn.refresh.plist" let home = fm.homeDirectoryForCurrentUser.path let destPath = "\(home)/Library/LaunchAgents/\(agentName)" + let agentPath = (Bundle.main.resourcePath ?? 
"") + "/CodeBurnRefreshAgent" let plist = """ @@ -171,11 +174,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { com.codeburn.refresh ProgramArguments - /usr/bin/osascript - -l - JavaScript - -e - ObjC.import("Foundation"); $.NSDistributedNotificationCenter.defaultCenter.postNotificationNameObjectUserInfoDeliverImmediately("com.codeburn.refresh", $(), $(), true) + \(agentPath) StartInterval 30 @@ -209,26 +208,65 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate { } private func registerLoginItemIfNeeded() { - let key = "codeburn.loginItemRegistered" - guard !UserDefaults.standard.bool(forKey: key) else { return } + guard SMAppService.mainApp.status != .enabled else { return } + do { + try SMAppService.mainApp.register() + } catch { + NSLog("CodeBurn: Login item registration failed: \(error)") + } + } + + private func startSocketListener() { + let fm = FileManager.default + let home = fm.homeDirectoryForCurrentUser.path + let cacheDir = "\(home)/.cache/codeburn" + let socketPath = "\(cacheDir)/menubar.sock" - let appPath = Bundle.main.bundlePath - let script = "tell application \"System Events\" to make login item at end with properties {path:\"\(appPath)\", hidden:false}" + try? fm.createDirectory(atPath: cacheDir, withIntermediateDirectories: true) + if fm.fileExists(atPath: socketPath) { + try? 
fm.removeItem(atPath: socketPath) + } - let process = Process() - process.launchPath = "/usr/bin/osascript" - process.arguments = ["-e", script] - process.standardOutput = FileHandle.nullDevice - process.standardError = FileHandle.nullDevice + let socketFD = Darwin.socket(AF_UNIX, SOCK_STREAM, 0) + guard socketFD >= 0 else { + NSLog("CodeBurn: failed to create socket") + return + } - do { - try process.run() - process.waitUntilExit() - if process.terminationStatus == 0 { - UserDefaults.standard.set(true, forKey: key) + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + let socketPathC = (socketPath as NSString).fileSystemRepresentation + withUnsafeMutablePointer(to: &addr.sun_path.0) { ptr in + _ = strcpy(ptr, socketPathC) + } + + let addrSize = socklen_t(MemoryLayout.size) + let bindResult = withUnsafePointer(to: &addr) { + $0.withMemoryRebound(to: sockaddr.self, capacity: 1) { + Darwin.bind(socketFD, $0, addrSize) + } + } + guard bindResult == 0 else { + NSLog("CodeBurn: failed to bind socket at \(socketPath)") + Darwin.close(socketFD) + return + } + + Darwin.listen(socketFD, 5) + + DispatchQueue.global(qos: .background).async { [weak self, socketFD] in + while true { + let clientFD = Darwin.accept(socketFD, nil, nil) + guard clientFD >= 0 else { continue } + var buf: [UInt8] = Array(repeating: 0, count: 1024) + let n = Darwin.read(clientFD, &buf, buf.count) + if n > 0 { + DispatchQueue.main.async { [weak self] in + self?.forceRefresh() + } + } + Darwin.close(clientFD) } - } catch { - NSLog("CodeBurn: Login item registration failed: \(error)") } } diff --git a/mac/Sources/CodeBurnRefreshAgent/main.swift b/mac/Sources/CodeBurnRefreshAgent/main.swift new file mode 100644 index 00000000..56b8ad26 --- /dev/null +++ b/mac/Sources/CodeBurnRefreshAgent/main.swift @@ -0,0 +1,8 @@ +import Foundation + +DistributedNotificationCenter.default().postNotificationName( + .init("com.codeburn.refresh"), + object: nil, + userInfo: nil, + options: 
.deliverImmediately +) diff --git a/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift b/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift new file mode 100644 index 00000000..82dc757c --- /dev/null +++ b/mac/Tests/CodeBurnMenubarTests/EDRDetectionFixTests.swift @@ -0,0 +1,137 @@ +import Testing +import Foundation +import ServiceManagement + +private func makePlist(agentPath: String) -> String { + """ + + + + + Label + com.codeburn.refresh + ProgramArguments + + \(agentPath) + + StartInterval + 30 + RunAtLoad + + + +""" +} + +@Suite("LaunchAgent Plist") +struct LaunchAgentPlistTests { + @Test("Plist has correct ProgramArguments") + func programArgumentsIsSingleElementArray() throws { + let plistStr = makePlist(agentPath: "/path/to/CodeBurnRefreshAgent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let args = try #require(dict["ProgramArguments"] as? [String]) + #expect(args == ["/path/to/CodeBurnRefreshAgent"]) + } + + @Test("Plist has StartInterval of 30") + func startIntervalIs30() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let interval = try #require(dict["StartInterval"] as? Int) + #expect(interval == 30) + } + + @Test("Plist has RunAtLoad true") + func runAtLoadIsTrue() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let runAtLoad = try #require(dict["RunAtLoad"] as? 
Bool) + #expect(runAtLoad == true) + } + + @Test("Plist has correct Label") + func labelIsCorrect() throws { + let plistStr = makePlist(agentPath: "/path/to/agent") + let data = Data(plistStr.utf8) + let raw = try PropertyListSerialization.propertyList(from: data, format: nil) + let dict = try #require(raw as? NSDictionary) + let label = try #require(dict["Label"] as? String) + #expect(label == "com.codeburn.refresh") + } + + @Test("Plist idempotency") + func idempotent() { + let a = makePlist(agentPath: "/same/path") + let b = makePlist(agentPath: "/same/path") + #expect(a == b) + } +} + +@Suite("Login Item Guard") +struct LoginItemGuardTests { + @Test("SMAppService.mainApp.status is accessible") + func mainAppStatusIsAccessible() { + // The guard in registerLoginItemIfNeeded(): + // guard SMAppService.mainApp.status != .enabled else { return } + // When status is .enabled, the function returns early (no registration). + // When status is .notRegistered / .requiresApproval, it proceeds to register. + let status = SMAppService.mainApp.status + // In a running app, status is .enabled, .notRegistered, or .requiresApproval. + // In a test context without an app bundle, it may be .notFound (macOS 14+). + let known: Bool = status == .enabled || status == .notRegistered + || status == .requiresApproval || status == .notFound + #expect(known) + } +} + +@Test("CodeBurnRefreshAgent builds and runs successfully") +func agentBuildsAndRuns() throws { + let packageDir = URL(fileURLWithPath: #filePath) + .deletingLastPathComponent() + .deletingLastPathComponent() + .deletingLastPathComponent() + + let scratchDir = FileManager.default.temporaryDirectory + .appendingPathComponent("codeburn-smoke-test-build") + try? 
FileManager.default.removeItem(at: scratchDir) + + let build = Process() + build.launchPath = "/usr/bin/env" + build.arguments = [ + "swift", "build", "--product", "CodeBurnRefreshAgent", + "--scratch-path", scratchDir.path + ] + build.currentDirectoryURL = packageDir + try build.run() + build.waitUntilExit() + #expect(build.terminationStatus == 0, "Build failed") + + let showPath = Process() + let pipe = Pipe() + showPath.launchPath = "/usr/bin/env" + showPath.arguments = [ + "swift", "build", "--product", "CodeBurnRefreshAgent", + "--scratch-path", scratchDir.path, "--show-bin-path" + ] + showPath.currentDirectoryURL = packageDir + showPath.standardOutput = pipe + try showPath.run() + showPath.waitUntilExit() + + let binPathData = pipe.fileHandleForReading.readDataToEndOfFile() + let binPath = String(data: binPathData, encoding: .utf8)? + .trimmingCharacters(in: .whitespacesAndNewlines) ?? "" + let binaryURL = URL(fileURLWithPath: binPath).appendingPathComponent("CodeBurnRefreshAgent") + + let agent = Process() + agent.launchPath = binaryURL.path + try agent.run() + agent.waitUntilExit() + #expect(agent.terminationStatus == 0, "Agent exited with non-zero status") +} diff --git a/src/cli.ts b/src/cli.ts index dec3d494..abc2b0bc 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,15 +1,980 @@ -#!/usr/bin/env node -// This launcher must stay parseable by Node 18. Do NOT add static imports. -const [major, minor] = process.versions.node.split('.').map(Number) -if (major < 22 || (major === 22 && minor < 13)) { - process.stderr.write( - `codeburn requires Node.js >= 22.13.0 (current: ${process.version})\n` + - 'Upgrade at https://nodejs.org/\n', - ) - process.exit(1) -} - -import('./main.js').catch((err) => { - process.stderr.write(String(err?.message ?? 
err) + '\n') - process.exit(1) +import { Command } from 'commander' +import { installMenubarApp } from './menubar-installer.js' +import { notifyMenubar } from './menubar-socket.js' +import { exportCsv, exportJson, type PeriodExport } from './export.js' +import { loadPricing, setModelAliases } from './models.js' +import { parseAllSessions, filterProjectsByName } from './parser.js' +import { convertCost } from './currency.js' +import { renderStatusBar } from './format.js' +import { type PeriodData, type ProviderCost } from './menubar-json.js' +import { buildMenubarPayload } from './menubar-json.js' +import { getDaysInRange, ensureCacheHydrated, emptyCache, BACKFILL_DAYS, toDateString } from './daily-cache.js' +import { aggregateProjectsIntoDays, buildPeriodDataFromDays, dateKey } from './day-aggregator.js' +import { CATEGORY_LABELS, type DateRange, type ProjectSummary, type TaskCategory } from './types.js' +import { aggregateModelEfficiency } from './model-efficiency.js' +import { renderDashboard } from './dashboard.js' +import { formatDateRangeLabel, parseDateRangeFlags, getDateRange, toPeriod, type Period } from './cli-date.js' +import { runOptimize, scanAndDetect } from './optimize.js' +import { renderCompare } from './compare.js' +import { getAllProviders } from './providers/index.js' +import { clearPlan, readConfig, readPlan, saveConfig, savePlan, getConfigFilePath, type PlanId } from './config.js' +import { clampResetDay, getPlanUsageOrNull, type PlanUsage } from './plan-usage.js' +import { getPresetPlan, isPlanId, isPlanProvider, planDisplayName } from './plans.js' +import { createRequire } from 'node:module' + +const require = createRequire(import.meta.url) +const { version } = require('../package.json') +import { loadCurrency, getCurrency, isValidCurrencyCode } from './currency.js' + +async function hydrateCache() { + try { + return await ensureCacheHydrated( + (range) => parseAllSessions(range, 'all'), + aggregateProjectsIntoDays, + ) + } catch { + return 
emptyCache() + } +} + +function collect(val: string, acc: string[]): string[] { + acc.push(val) + return acc +} + +function parseNumber(value: string): number { + return Number(value) +} + +function parseInteger(value: string): number { + return parseInt(value, 10) +} + +type JsonPlanSummary = { + id: PlanId + budget: number + spent: number + percentUsed: number + status: 'under' | 'near' | 'over' + projectedMonthEnd: number + daysUntilReset: number + periodStart: string + periodEnd: string +} + +function toJsonPlanSummary(planUsage: PlanUsage): JsonPlanSummary { + return { + id: planUsage.plan.id, + budget: convertCost(planUsage.budgetUsd), + spent: convertCost(planUsage.spentApiEquivalentUsd), + percentUsed: Math.round(planUsage.percentUsed * 10) / 10, + status: planUsage.status, + projectedMonthEnd: convertCost(planUsage.projectedMonthUsd), + daysUntilReset: planUsage.daysUntilReset, + periodStart: planUsage.periodStart.toISOString(), + periodEnd: planUsage.periodEnd.toISOString(), + } +} + +function assertFormat(value: string, allowed: readonly string[], command: string): void { + if (!allowed.includes(value)) { + process.stderr.write( + `codeburn ${command}: unknown format "${value}". 
Valid values: ${allowed.join(', ')}.\n` + ) + process.exit(1) + } +} + +async function runJsonReport(period: Period, provider: string, project: string[], exclude: string[]): Promise { + await loadPricing() + const { range, label } = getDateRange(period) + const projects = filterProjectsByName(await parseAllSessions(range, provider), project, exclude) + const report: ReturnType & { plan?: JsonPlanSummary } = buildJsonReport(projects, label, period) + const planUsage = await getPlanUsageOrNull() + if (planUsage) { + report.plan = toJsonPlanSummary(planUsage) + } + console.log(JSON.stringify(report, null, 2)) +} + +const program = new Command() + .name('codeburn') + .description('See where your AI coding tokens go - by task, tool, model, and project') + .version(version) + .option('--verbose', 'print warnings to stderr on read failures and skipped files') + .option('--timezone ', 'IANA timezone for date grouping (e.g. Asia/Tokyo, America/New_York)') + +program.hook('preAction', async (thisCommand) => { + const tz = thisCommand.opts<{ timezone?: string }>().timezone ?? process.env['CODEBURN_TZ'] + if (tz) { + try { + Intl.DateTimeFormat(undefined, { timeZone: tz }) + } catch { + console.error(`\n Invalid timezone: "${tz}". Use an IANA timezone like "America/New_York" or "Asia/Tokyo".\n`) + process.exit(1) + } + process.env.TZ = tz + } + const config = await readConfig() + setModelAliases(config.modelAliases ?? 
{}) + if (thisCommand.opts<{ verbose?: boolean }>().verbose) { + process.env['CODEBURN_VERBOSE'] = '1' + } + await loadCurrency() }) + +function buildJsonReport(projects: ProjectSummary[], period: string, periodKey: string) { + const sessions = projects.flatMap(p => p.sessions) + const { code } = getCurrency() + + const totalCostUSD = projects.reduce((s, p) => s + p.totalCostUSD, 0) + const totalCalls = projects.reduce((s, p) => s + p.totalApiCalls, 0) + const totalSessions = projects.reduce((s, p) => s + p.sessions.length, 0) + const totalInput = sessions.reduce((s, sess) => s + sess.totalInputTokens, 0) + const totalOutput = sessions.reduce((s, sess) => s + sess.totalOutputTokens, 0) + const totalCacheRead = sessions.reduce((s, sess) => s + sess.totalCacheReadTokens, 0) + const totalCacheWrite = sessions.reduce((s, sess) => s + sess.totalCacheWriteTokens, 0) + // Match src/menubar-json.ts:cacheHitPercent: reads over reads+fresh-input. cache_write + // counts tokens being stored, not served, so it doesn't belong in the denominator. + const cacheHitDenom = totalInput + totalCacheRead + const cacheHitPercent = cacheHitDenom > 0 ? Math.round((totalCacheRead / cacheHitDenom) * 1000) / 10 : 0 + + // Per-day rollup. Mirrors parser.ts categoryBreakdown semantics so a + // consumer summing daily[].editTurns over a period gets the same total as + // sum(activities[].editTurns) for that period: every turn counts once for + // `turns`, edit turns count for `editTurns`, edit turns with zero retries + // count for `oneShotTurns`. Issue #279 — daily-resolution efficiency + // dashboards need this without re-deriving from activity-level rollups. + const dailyMap: Record = {} + for (const sess of sessions) { + for (const turn of sess.turns) { + // Prefer the user-message timestamp on the turn; fall back to the first + // assistant-call timestamp when the user line is missing (continuation + // sessions where the JSONL begins mid-conversation). 
Previously these + // turns dropped from daily but stayed in activities, breaking the + // sum(daily[].editTurns) === sum(activities[].editTurns) invariant. + const ts = turn.timestamp || turn.assistantCalls[0]?.timestamp + if (!ts) { continue } + const day = dateKey(ts) + if (!dailyMap[day]) { dailyMap[day] = { cost: 0, calls: 0, turns: 0, editTurns: 0, oneShotTurns: 0 } } + dailyMap[day].turns += 1 + if (turn.hasEdits) { + dailyMap[day].editTurns += 1 + if (turn.retries === 0) dailyMap[day].oneShotTurns += 1 + } + for (const call of turn.assistantCalls) { + dailyMap[day].cost += call.costUSD + dailyMap[day].calls += 1 + } + } + } + const daily = Object.entries(dailyMap).sort().map(([date, d]) => ({ + date, + cost: convertCost(d.cost), + calls: d.calls, + turns: d.turns, + editTurns: d.editTurns, + oneShotTurns: d.oneShotTurns, + // Pre-computed convenience for dashboards that don't want to do the math. + // null when there are no edit turns (the rate is undefined, not zero — + // a day where the user only had Q&A turns shouldn't read as 0% one-shot). + oneShotRate: d.editTurns > 0 + ? Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 + : null, + })) + + const projectList = projects.map(p => ({ + name: p.project, + path: p.projectPath, + cost: convertCost(p.totalCostUSD), + avgCostPerSession: p.sessions.length > 0 + ? 
convertCost(p.totalCostUSD / p.sessions.length) + : null, + calls: p.totalApiCalls, + sessions: p.sessions.length, + })) + + const modelMap: Record = {} + const modelEfficiency = aggregateModelEfficiency(projects) + for (const sess of sessions) { + for (const [model, d] of Object.entries(sess.modelBreakdown)) { + if (!modelMap[model]) { modelMap[model] = { calls: 0, cost: 0, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 } } + modelMap[model].calls += d.calls + modelMap[model].cost += d.costUSD + modelMap[model].inputTokens += d.tokens.inputTokens + modelMap[model].outputTokens += d.tokens.outputTokens + modelMap[model].cacheReadTokens += d.tokens.cacheReadInputTokens + modelMap[model].cacheWriteTokens += d.tokens.cacheCreationInputTokens + } + } + const models = Object.entries(modelMap) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([name, { cost, ...rest }]) => { + const efficiency = modelEfficiency.get(name) + return { + name, + ...rest, + cost: convertCost(cost), + editTurns: efficiency?.editTurns ?? 0, + oneShotTurns: efficiency?.oneShotTurns ?? 0, + oneShotRate: efficiency?.oneShotRate ?? null, + retriesPerEdit: efficiency?.retriesPerEdit ?? null, + costPerEdit: efficiency?.costPerEditUSD !== null && efficiency?.costPerEditUSD !== undefined + ? convertCost(efficiency.costPerEditUSD) + : null, + } + }) + + const catMap: Record = {} + for (const sess of sessions) { + for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { + if (!catMap[cat]) { catMap[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } } + catMap[cat].turns += d.turns + catMap[cat].cost += d.costUSD + catMap[cat].editTurns += d.editTurns + catMap[cat].oneShotTurns += d.oneShotTurns + } + } + const activities = Object.entries(catMap) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([cat, d]) => ({ + category: CATEGORY_LABELS[cat as TaskCategory] ?? 
cat, + cost: convertCost(d.cost), + turns: d.turns, + editTurns: d.editTurns, + oneShotTurns: d.oneShotTurns, + oneShotRate: d.editTurns > 0 ? Math.round((d.oneShotTurns / d.editTurns) * 1000) / 10 : null, + })) + + const toolMap: Record = {} + const mcpMap: Record = {} + const bashMap: Record = {} + for (const sess of sessions) { + for (const [tool, d] of Object.entries(sess.toolBreakdown)) { + toolMap[tool] = (toolMap[tool] ?? 0) + d.calls + } + for (const [server, d] of Object.entries(sess.mcpBreakdown)) { + mcpMap[server] = (mcpMap[server] ?? 0) + d.calls + } + for (const [cmd, d] of Object.entries(sess.bashBreakdown)) { + bashMap[cmd] = (bashMap[cmd] ?? 0) + d.calls + } + } + + const sortedMap = (m: Record) => + Object.entries(m).sort(([, a], [, b]) => b - a).map(([name, calls]) => ({ name, calls })) + + const topSessions = projects + .flatMap(p => p.sessions.map(s => ({ project: p.project, sessionId: s.sessionId, date: s.firstTimestamp ? dateKey(s.firstTimestamp) : null, cost: convertCost(s.totalCostUSD), calls: s.apiCalls }))) + .sort((a, b) => b.cost - a.cost) + .slice(0, 5) + + return { + generated: new Date().toISOString(), + currency: code, + period, + periodKey, + overview: { + cost: convertCost(totalCostUSD), + calls: totalCalls, + sessions: totalSessions, + cacheHitPercent, + tokens: { + input: totalInput, + output: totalOutput, + cacheRead: totalCacheRead, + cacheWrite: totalCacheWrite, + }, + }, + daily, + projects: projectList, + models, + activities, + tools: sortedMap(toolMap), + mcpServers: sortedMap(mcpMap), + shellCommands: sortedMap(bashMap), + topSessions, + } +} + +program + .command('report', { isDefault: true }) + .description('Interactive usage dashboard') + .option('-p, --period ', 'Starting period: today, week, 30days, month, all', 'week') + .option('--from ', 'Start date (YYYY-MM-DD). Overrides --period when set') + .option('--to ', 'End date (YYYY-MM-DD). 
Overrides --period when set') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'report') + let customRange: DateRange | null = null + try { + customRange = parseDateRangeFlags(opts.from, opts.to) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Error: ${message}\n`) + process.exit(1) + } + + const period = toPeriod(opts.period) + if (opts.format === 'json') { + await loadPricing() + await hydrateCache() + if (customRange) { + const label = formatDateRangeLabel(opts.from, opts.to) + const projects = filterProjectsByName( + await parseAllSessions(customRange, opts.provider), + opts.project, + opts.exclude, + ) + console.log(JSON.stringify(buildJsonReport(projects, label, 'custom'), null, 2)) + } else { + await runJsonReport(period, opts.provider, opts.project, opts.exclude) + } + return + } + await hydrateCache() + const customRangeLabel = customRange ? 
formatDateRangeLabel(opts.from, opts.to) : undefined + await renderDashboard(period, opts.provider, opts.refresh, opts.project, opts.exclude, customRange, customRangeLabel) + }) + +function buildPeriodData(label: string, projects: ProjectSummary[]): PeriodData { + const sessions = projects.flatMap(p => p.sessions) + const catTotals: Record = {} + const modelTotals: Record = {} + let inputTokens = 0, outputTokens = 0, cacheReadTokens = 0, cacheWriteTokens = 0 + + for (const sess of sessions) { + inputTokens += sess.totalInputTokens + outputTokens += sess.totalOutputTokens + cacheReadTokens += sess.totalCacheReadTokens + cacheWriteTokens += sess.totalCacheWriteTokens + for (const [cat, d] of Object.entries(sess.categoryBreakdown)) { + if (!catTotals[cat]) catTotals[cat] = { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 } + catTotals[cat].turns += d.turns + catTotals[cat].cost += d.costUSD + catTotals[cat].editTurns += d.editTurns + catTotals[cat].oneShotTurns += d.oneShotTurns + } + for (const [model, d] of Object.entries(sess.modelBreakdown)) { + if (!modelTotals[model]) modelTotals[model] = { calls: 0, cost: 0 } + modelTotals[model].calls += d.calls + modelTotals[model].cost += d.costUSD + } + } + + return { + label, + cost: projects.reduce((s, p) => s + p.totalCostUSD, 0), + calls: projects.reduce((s, p) => s + p.totalApiCalls, 0), + sessions: projects.reduce((s, p) => s + p.sessions.length, 0), + inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens, + categories: Object.entries(catTotals) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([cat, d]) => ({ name: CATEGORY_LABELS[cat as TaskCategory] ?? 
cat, ...d })), + models: Object.entries(modelTotals) + .sort(([, a], [, b]) => b.cost - a.cost) + .map(([name, d]) => ({ name, ...d })), + } +} + +program + .command('status') + .description('Compact status output (today + month)') + .option('--format ', 'Output format: terminal, menubar-json, json', 'terminal') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--period ', 'Primary period for menubar-json: today, week, 30days, month, all', 'today') + .option('--no-optimize', 'Skip optimize findings (menubar-json only, faster)') + .action(async (opts) => { + assertFormat(opts.format, ['terminal', 'menubar-json', 'json'], 'status') + await loadPricing() + const pf = opts.provider + const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) + if (opts.format === 'menubar-json') { + const periodInfo = getDateRange(opts.period) + const now = new Date() + const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate()) + const yesterdayStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - 1)) + const isAllProviders = pf === 'all' + + const cache = await hydrateCache() + + // CURRENT PERIOD DATA + // - .all provider: assemble from cache + today (fast) + // - specific provider: parse the period range with provider filter (correct, but slower) + let currentData: PeriodData + let scanProjects: ProjectSummary[] + let scanRange: DateRange + + if (isAllProviders) { + // Parse only today's sessions; historical data comes from cache to avoid double-counting + const todayRange: DateRange = { start: todayStart, end: new Date() } + const todayProjects = fp(await parseAllSessions(todayRange, 'all')) + const todayDays = aggregateProjectsIntoDays(todayProjects) + const rangeStartStr = 
toDateString(periodInfo.range.start) + const rangeEndStr = toDateString(periodInfo.range.end) + const historicalDays = getDaysInRange(cache, rangeStartStr, yesterdayStr) + const todayInRange = todayDays.filter(d => d.date >= rangeStartStr && d.date <= rangeEndStr) + const allDays = [...historicalDays, ...todayInRange].sort((a, b) => a.date.localeCompare(b.date)) + currentData = buildPeriodDataFromDays(allDays, periodInfo.label) + scanProjects = todayProjects + scanRange = periodInfo.range + } else { + const projects = fp(await parseAllSessions(periodInfo.range, pf)) + currentData = buildPeriodData(periodInfo.label, projects) + scanProjects = projects + scanRange = periodInfo.range + } + + // PROVIDERS + // For .all: enumerate every provider with cost across the period (from cache) + installed-but-zero. + // For specific: just this single provider with its scoped cost. + const allProviders = await getAllProviders() + const displayNameByName = new Map(allProviders.map(p => [p.name, p.displayName])) + const providers: ProviderCost[] = [] + if (isAllProviders) { + // Parse only today; historical provider costs come from cache + const todayRangeForProviders: DateRange = { start: todayStart, end: new Date() } + const todayDaysForProviders = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForProviders, 'all'))) + const rangeStartStr = toDateString(periodInfo.range.start) + const todayStr = toDateString(todayStart) + const allDaysForProviders = [ + ...getDaysInRange(cache, rangeStartStr, yesterdayStr), + ...todayDaysForProviders.filter(d => d.date === todayStr), + ] + const providerTotals: Record = {} + for (const d of allDaysForProviders) { + for (const [name, p] of Object.entries(d.providers)) { + providerTotals[name] = (providerTotals[name] ?? 0) + p.cost + } + } + for (const [name, cost] of Object.entries(providerTotals)) { + providers.push({ name: displayNameByName.get(name) ?? 
name, cost }) + } + for (const p of allProviders) { + if (providers.some(pc => pc.name === p.displayName)) continue + const sources = await p.discoverSessions() + if (sources.length > 0) providers.push({ name: p.displayName, cost: 0 }) + } + } else { + const display = displayNameByName.get(pf) ?? pf + providers.push({ name: display, cost: currentData.cost }) + } + + // DAILY HISTORY (last 365 days) + // Cache stores per-provider cost+calls per day in DailyEntry.providers, so we can derive + // a provider-filtered history without re-parsing. Tokens aren't broken down per provider + // in the cache, so the filtered view shows zero tokens (heatmap/trend still works on cost). + const historyStartStr = toDateString(new Date(now.getFullYear(), now.getMonth(), now.getDate() - BACKFILL_DAYS)) + const allCacheDays = getDaysInRange(cache, historyStartStr, yesterdayStr) + // Parse only today for history; historical days come from cache + const todayRangeForHistory: DateRange = { start: todayStart, end: new Date() } + const allTodayDaysForHistory = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForHistory, 'all'))) + const todayStrForHistory = toDateString(todayStart) + const fullHistory = [...allCacheDays, ...allTodayDaysForHistory.filter(d => d.date === todayStrForHistory)] + const dailyHistory = fullHistory.map(d => { + if (isAllProviders) { + const topModels = Object.entries(d.models) + .filter(([name]) => name !== '') + .sort(([, a], [, b]) => b.cost - a.cost) + .slice(0, 5) + .map(([name, m]) => ({ + name, + cost: m.cost, + calls: m.calls, + inputTokens: m.inputTokens, + outputTokens: m.outputTokens, + })) + return { + date: d.date, + cost: d.cost, + calls: d.calls, + inputTokens: d.inputTokens, + outputTokens: d.outputTokens, + cacheReadTokens: d.cacheReadTokens, + cacheWriteTokens: d.cacheWriteTokens, + topModels, + } + } + const prov = d.providers[pf] ?? 
{ calls: 0, cost: 0 } + return { + date: d.date, + cost: prov.cost, + calls: prov.calls, + inputTokens: 0, + outputTokens: 0, + cacheReadTokens: 0, + cacheWriteTokens: 0, + topModels: [], + } + }) + + const optimize = opts.optimize === false ? null : await scanAndDetect(scanProjects, scanRange) + console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory))) + notifyMenubar() + return + } + + if (opts.format === 'json') { + await hydrateCache() + const todayData = buildPeriodData('today', fp(await parseAllSessions(getDateRange('today').range, pf))) + const monthData = buildPeriodData('month', fp(await parseAllSessions(getDateRange('month').range, pf))) + const { code, rate } = getCurrency() + const payload: { + currency: string + today: { cost: number; calls: number } + month: { cost: number; calls: number } + plan?: JsonPlanSummary + } = { + currency: code, + today: { cost: Math.round(todayData.cost * rate * 100) / 100, calls: todayData.calls }, + month: { cost: Math.round(monthData.cost * rate * 100) / 100, calls: monthData.calls }, + } + const planUsage = await getPlanUsageOrNull() + if (planUsage) { + payload.plan = toJsonPlanSummary(planUsage) + } + console.log(JSON.stringify(payload)) + return + } + + await hydrateCache() + const monthProjects = fp(await parseAllSessions(getDateRange('month').range, pf)) + console.log(renderStatusBar(monthProjects)) + }) + +program + .command('today') + .description('Today\'s usage dashboard') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'today') + if (opts.format === 'json') { + await runJsonReport('today', opts.provider, opts.project, opts.exclude) + return + } + await hydrateCache() + await renderDashboard('today', opts.provider, opts.refresh, opts.project, opts.exclude) + }) + +program + .command('month') + .description('This month\'s usage dashboard') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .option('--format ', 'Output format: tui, json', 'tui') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .option('--refresh ', 'Auto-refresh interval in seconds (0 to disable)', parseInteger, 30) + .action(async (opts) => { + assertFormat(opts.format, ['tui', 'json'], 'month') + if (opts.format === 'json') { + await runJsonReport('month', opts.provider, opts.project, opts.exclude) + return + } + await hydrateCache() + await renderDashboard('month', opts.provider, opts.refresh, opts.project, opts.exclude) + }) + +program + .command('export') + .description('Export usage data to CSV or JSON') + .option('-f, --format ', 'Export format: csv, json', 'csv') + .option('-o, --output ', 'Output file path') + .option('--from ', 'Start date (YYYY-MM-DD). Exports a single custom period when set') + .option('--to ', 'End date (YYYY-MM-DD). Exports a single custom period when set') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .option('--project ', 'Show only projects matching name (repeatable)', collect, []) + .option('--exclude ', 'Exclude projects matching name (repeatable)', collect, []) + .action(async (opts) => { + assertFormat(opts.format, ['csv', 'json'], 'export') + await loadPricing() + await hydrateCache() + const pf = opts.provider + const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude) + let customRange: DateRange | null = null + try { + customRange = parseDateRangeFlags(opts.from, opts.to) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Error: ${message}\n`) + process.exit(1) + } + + const periods: PeriodExport[] = customRange + ? [{ label: formatDateRangeLabel(opts.from, opts.to), projects: fp(await parseAllSessions(customRange, pf)) }] + : [ + { label: 'Today', projects: fp(await parseAllSessions(getDateRange('today').range, pf)) }, + { label: '7 Days', projects: fp(await parseAllSessions(getDateRange('week').range, pf)) }, + { label: '30 Days', projects: fp(await parseAllSessions(getDateRange('30days').range, pf)) }, + ] + + if (periods.every(p => p.projects.length === 0)) { + console.log('\n No usage data found.\n') + return + } + + const defaultName = `codeburn-${toDateString(new Date())}` + const outputPath = opts.output ?? `${defaultName}.${opts.format}` + + let savedPath: string + try { + if (opts.format === 'json') { + savedPath = await exportJson(periods, outputPath) + } else { + savedPath = await exportCsv(periods, outputPath) + } + } catch (err) { + // Protection guards in export.ts (symlink refusal, non-codeburn folder refusal, etc.) + // throw with a user-readable message. Print just the message, not the stack, so the CLI + // doesn't spray its internals at the user. + const message = err instanceof Error ? 
err.message : String(err) + console.error(`\n Export failed: ${message}\n`) + process.exit(1) + } + + const exportedLabel = customRange ? formatDateRangeLabel(opts.from, opts.to) : 'Today + 7 Days + 30 Days' + console.log(`\n Exported (${exportedLabel}) to: ${savedPath}\n`) + }) + +program + .command('menubar') + .description('Install and launch the macOS menubar app (one command, no clone)') + .option('--force', 'Reinstall even if an older copy is already in ~/Applications') + .action(async (opts: { force?: boolean }) => { + try { + const result = await installMenubarApp({ force: opts.force }) + console.log(`\n Ready. ${result.installedPath}\n`) + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + console.error(`\n Menubar install failed: ${message}\n`) + process.exit(1) + } + }) + +program + .command('currency [code]') + .description('Set display currency (e.g. codeburn currency GBP)') + .option('--symbol ', 'Override the currency symbol') + .option('--reset', 'Reset to USD (removes currency config)') + .action(async (code?: string, opts?: { symbol?: string; reset?: boolean }) => { + if (opts?.reset) { + const config = await readConfig() + delete config.currency + await saveConfig(config) + console.log('\n Currency reset to USD.\n') + return + } + + if (!code) { + const { code: activeCode, rate, symbol } = getCurrency() + if (activeCode === 'USD' && rate === 1) { + console.log('\n Currency: USD (default)') + console.log(` Config: ${getConfigFilePath()}\n`) + } else { + console.log(`\n Currency: ${activeCode}`) + console.log(` Symbol: ${symbol}`) + console.log(` Rate: 1 USD = ${rate} ${activeCode}`) + console.log(` Config: ${getConfigFilePath()}\n`) + } + return + } + + const upperCode = code.toUpperCase() + if (!isValidCurrencyCode(upperCode)) { + console.error(`\n "${code}" is not a valid ISO 4217 currency code.\n`) + process.exitCode = 1 + return + } + + const config = await readConfig() + config.currency = { + code: upperCode, 
+ ...(opts?.symbol ? { symbol: opts.symbol } : {}), + } + await saveConfig(config) + + await loadCurrency() + const { rate, symbol } = getCurrency() + + console.log(`\n Currency set to ${upperCode}.`) + console.log(` Symbol: ${symbol}`) + console.log(` Rate: 1 USD = ${rate} ${upperCode}`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + }) + +program + .command('model-alias [from] [to]') + .description('Map a provider model name to a canonical one for pricing (e.g. codeburn model-alias my-model claude-opus-4-6)') + .option('--remove ', 'Remove an alias') + .option('--list', 'List configured aliases') + .action(async (from?: string, to?: string, opts?: { remove?: string; list?: boolean }) => { + const config = await readConfig() + const aliases = config.modelAliases ?? {} + + if (opts?.list || (!from && !opts?.remove)) { + const entries = Object.entries(aliases) + if (entries.length === 0) { + console.log('\n No model aliases configured.') + console.log(` Config: ${getConfigFilePath()}\n`) + } else { + console.log('\n Model aliases:') + for (const [src, dst] of entries) { + console.log(` ${src} -> ${dst}`) + } + console.log(` Config: ${getConfigFilePath()}\n`) + } + return + } + + if (opts?.remove) { + if (!(opts.remove in aliases)) { + console.error(`\n Alias not found: ${opts.remove}\n`) + process.exitCode = 1 + return + } + delete aliases[opts.remove] + config.modelAliases = Object.keys(aliases).length > 0 ? 
aliases : undefined + await saveConfig(config) + console.log(`\n Removed alias: ${opts.remove}\n`) + return + } + + if (!from || !to) { + console.error('\n Usage: codeburn model-alias \n') + process.exitCode = 1 + return + } + + aliases[from] = to + config.modelAliases = aliases + await saveConfig(config) + console.log(`\n Alias saved: ${from} -> ${to}`) + console.log(` Config: ${getConfigFilePath()}\n`) + }) + +program + .command('plan [action] [id]') + .description('Show or configure a subscription plan for overage tracking') + .option('--format ', 'Output format: text or json', 'text') + .option('--monthly-usd ', 'Monthly plan price in USD (for custom)', parseNumber) + .option('--provider ', 'Provider scope: all, claude, codex, cursor', 'all') + .option('--reset-day ', 'Day of month plan resets (1-28)', parseInteger, 1) + .action(async (action?: string, id?: string, opts?: { format?: string; monthlyUsd?: number; provider?: string; resetDay?: number }) => { + assertFormat(opts?.format ?? 'text', ['text', 'json'], 'plan') + const mode = action ?? 'show' + + if (mode === 'show') { + const plan = await readPlan() + const displayPlan = !plan || plan.id === 'none' + ? 
{ id: 'none', monthlyUsd: 0, provider: 'all', resetDay: 1, setAt: null } + : { + id: plan.id, + monthlyUsd: plan.monthlyUsd, + provider: plan.provider, + resetDay: clampResetDay(plan.resetDay), + setAt: plan.setAt, + } + if (opts?.format === 'json') { + console.log(JSON.stringify(displayPlan)) + return + } + if (!plan || plan.id === 'none') { + console.log('\n Plan: none') + console.log(' API-pricing view is active.') + console.log(` Config: ${getConfigFilePath()}\n`) + return + } + console.log(`\n Plan: ${planDisplayName(plan.id)} (${plan.id})`) + console.log(` Budget: $${plan.monthlyUsd}/month`) + console.log(` Provider: ${plan.provider}`) + console.log(` Reset day: ${clampResetDay(plan.resetDay)}`) + console.log(` Set at: ${plan.setAt}`) + console.log(` Config: ${getConfigFilePath()}\n`) + return + } + + if (mode === 'reset') { + await clearPlan() + console.log('\n Plan reset. API-pricing view is active.\n') + return + } + + if (mode !== 'set') { + console.error('\n Usage: codeburn plan [set | reset]\n') + process.exitCode = 1 + return + } + + if (!id || !isPlanId(id)) { + console.error(`\n Plan id must be one of: claude-pro, claude-max, cursor-pro, custom, none; got "${id ?? ''}".\n`) + process.exitCode = 1 + return + } + + const resetDay = opts?.resetDay ?? 1 + if (!Number.isInteger(resetDay) || resetDay < 1 || resetDay > 28) { + console.error(`\n --reset-day must be an integer from 1 to 28; got ${resetDay}.\n`) + process.exitCode = 1 + return + } + + if (id === 'none') { + await clearPlan() + console.log('\n Plan reset. 
API-pricing view is active.\n') + return + } + + if (id === 'custom') { + if (opts?.monthlyUsd === undefined) { + console.error('\n Custom plans require --monthly-usd .\n') + process.exitCode = 1 + return + } + const monthlyUsd = opts.monthlyUsd + if (!Number.isFinite(monthlyUsd) || monthlyUsd <= 0) { + console.error(`\n --monthly-usd must be a positive number; got ${opts.monthlyUsd}.\n`) + process.exitCode = 1 + return + } + const provider = opts?.provider ?? 'all' + if (!isPlanProvider(provider)) { + console.error(`\n --provider must be one of: all, claude, codex, cursor; got "${provider}".\n`) + process.exitCode = 1 + return + } + await savePlan({ + id: 'custom', + monthlyUsd, + provider, + resetDay, + setAt: new Date().toISOString(), + }) + console.log(`\n Plan set to custom ($${monthlyUsd}/month, ${provider}, reset day ${resetDay}).`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + return + } + + const preset = getPresetPlan(id) + if (!preset) { + console.error(`\n Unknown preset "${id}".\n`) + process.exitCode = 1 + return + } + + await savePlan({ + ...preset, + resetDay, + setAt: new Date().toISOString(), + }) + console.log(`\n Plan set to ${planDisplayName(preset.id)} ($${preset.monthlyUsd}/month).`) + console.log(` Provider: ${preset.provider}`) + console.log(` Reset day: ${resetDay}`) + console.log(` Config saved to ${getConfigFilePath()}\n`) + }) + +program + .command('optimize') + .description('Find token waste and get exact fixes') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') + .option('--provider ', 'Filter by provider (e.g. 
claude, gemini, cursor, copilot)', 'all') + .action(async (opts) => { + await loadPricing() + await hydrateCache() + const { range, label } = getDateRange(opts.period) + const projects = await parseAllSessions(range, opts.provider) + await runOptimize(projects, label, range) + }) + +program + .command('compare') + .description('Compare two AI models side-by-side') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'all') + .option('--provider ', 'Filter by provider (e.g. claude, gemini, cursor, copilot)', 'all') + .action(async (opts) => { + await loadPricing() + await hydrateCache() + const { range } = getDateRange(opts.period) + await renderCompare(range, opts.provider) + }) + +program + .command('models') + .description('Per-model token + cost table, optionally exploded by task type') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', '30days') + .option('--from ', 'Custom range start (YYYY-MM-DD)') + .option('--to ', 'Custom range end (YYYY-MM-DD)') + .option('--provider ', 'Filter by provider (e.g. claude, codex, cursor)', 'all') + .option('--task ', 'Filter to one task type (e.g. 
feature, debugging, refactoring)') + .option('--by-task', 'One row per (provider, model, task) instead of one row per (provider, model)') + .option('--top ', 'Show only the top N rows', (v: string) => parseInt(v, 10)) + .option('--min-cost ', 'Hide rows below this cost threshold', (v: string) => parseFloat(v)) + .option('--no-totals', 'Suppress the footer totals row') + .option('--format ', 'Output format: table, markdown, json, csv', 'table') + .action(async (opts) => { + const { aggregateModels, renderTable, renderMarkdown, renderJson, renderCsv } = await import('./models-report.js') + await loadPricing() + await hydrateCache() + + let range + if (opts.from || opts.to) { + const customRange = parseDateRangeFlags(opts.from, opts.to) + if (!customRange) { + process.stderr.write('codeburn: --from and --to must be valid YYYY-MM-DD dates\n') + process.exit(1) + } + range = customRange + } else { + range = getDateRange(opts.period).range + } + + const projects = await parseAllSessions(range, opts.provider) + const rows = await aggregateModels(projects, { + byTask: !!opts.byTask, + taskFilter: opts.task, + topN: typeof opts.top === 'number' && Number.isFinite(opts.top) ? opts.top : undefined, + minCost: typeof opts.minCost === 'number' && Number.isFinite(opts.minCost) ? opts.minCost : 0.01, + }) + + const fmt = (opts.format ?? 
'table').toLowerCase() + if (rows.length === 0 && (fmt === 'table' || fmt === 'markdown')) { + process.stdout.write('No model usage found for the selected period.\n') + return + } + if (fmt === 'json') { + process.stdout.write(renderJson(rows) + '\n') + } else if (fmt === 'csv') { + process.stdout.write(renderCsv(rows, { byTask: !!opts.byTask }) + '\n') + } else if (fmt === 'markdown' || fmt === 'md') { + process.stdout.write(renderMarkdown(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') + } else if (fmt === 'table') { + process.stdout.write(renderTable(rows, { byTask: !!opts.byTask, showTotals: opts.totals !== false }) + '\n') + } else { + process.stderr.write(`codeburn: unknown --format "${opts.format}". Choose table, markdown, json, or csv.\n`) + process.exit(1) + } + }) + +program + .command('yield') + .description('Track which AI spend shipped to main vs reverted/abandoned (experimental)') + .option('-p, --period ', 'Analysis period: today, week, 30days, month, all', 'week') + .action(async (opts) => { + const { computeYield, formatYieldSummary } = await import('./yield.js') + await loadPricing() + await hydrateCache() + const { range, label } = getDateRange(opts.period) + console.log(`\n Analyzing yield for ${label}...\n`) + const summary = await computeYield(range, process.cwd()) + console.log(formatYieldSummary(summary)) + }) + +program.parse() diff --git a/src/main.ts b/src/main.ts index 4ebfe337..abc2b0bc 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,5 +1,6 @@ import { Command } from 'commander' import { installMenubarApp } from './menubar-installer.js' +import { notifyMenubar } from './menubar-socket.js' import { exportCsv, exportJson, type PeriodExport } from './export.js' import { loadPricing, setModelAliases } from './models.js' import { parseAllSessions, filterProjectsByName } from './parser.js' @@ -515,6 +516,7 @@ program const optimize = opts.optimize === false ? 
null : await scanAndDetect(scanProjects, scanRange) console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory))) + notifyMenubar() return } diff --git a/src/menubar-socket.ts b/src/menubar-socket.ts new file mode 100644 index 00000000..fd874d57 --- /dev/null +++ b/src/menubar-socket.ts @@ -0,0 +1,12 @@ +import { connect } from 'node:net' +import { homedir } from 'node:os' +import { join } from 'node:path' + +const SOCKET_PATH = join(homedir(), '.cache', 'codeburn', 'menubar.sock') + +export function notifyMenubar(): void { + const sock = connect(SOCKET_PATH) + sock.on('error', () => {}) + sock.write('refresh\n') + sock.end() +} From 43765718b35bed122207c060d1190bc21278c8f2 Mon Sep 17 00:00:00 2001 From: Rashid Razak Date: Wed, 13 May 2026 12:18:17 +0800 Subject: [PATCH 17/17] Fix menubar showing empty data after reboot when CLI is installed via fnm/nvm Login-item launches don't source .zshrc, leaving version-manager bin directories (fnm, nvm, volta, asdf) absent from PATH. The menubar's augmentedPath only covered /opt/homebrew/bin and /usr/local/bin, so codeburn was never found after a cold reboot. 
- Add discoverNodeManagerBinDirs() that dynamically scans for fnm, nvm, volta, and asdf installations and adds the latest Node version's bin directory to PATH - Add PATH logging to DataClient spawn error for easier future diagnosis - Log the swallowed error in hydrateCache() catch block so silent cache-empty failures are visible in stderr - Add scripts/diagnose-menubar-cli.sh for testing restricted-PATH CLI execution without rebuilding the menubar app --- .../CodeBurnMenubar/Data/DataClient.swift | 2 + .../Security/CodeburnCLI.swift | 58 +++++++++++++++ scripts/diagnose-menubar-cli.sh | 73 +++++++++++++++++++ src/cli.ts | 5 +- 4 files changed, 137 insertions(+), 1 deletion(-) create mode 100755 scripts/diagnose-menubar-cli.sh diff --git a/mac/Sources/CodeBurnMenubar/Data/DataClient.swift b/mac/Sources/CodeBurnMenubar/Data/DataClient.swift index 4b0083c0..c3bcdb44 100644 --- a/mac/Sources/CodeBurnMenubar/Data/DataClient.swift +++ b/mac/Sources/CodeBurnMenubar/Data/DataClient.swift @@ -58,6 +58,8 @@ struct DataClient { do { try process.run() } catch { + let path = ProcessInfo.processInfo.environment["PATH"] ?? "(no PATH)" + NSLog("CodeBurn: CLI spawn failed. PATH=%@ error=%@", path, error.localizedDescription) throw DataClientError.spawn(error.localizedDescription) } diff --git a/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift b/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift index 4f4a5f82..88a4e3ca 100644 --- a/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift +++ b/mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift @@ -59,6 +59,64 @@ enum CodeburnCLI { for extra in additionalPathEntries where !parts.contains(extra) { parts.append(extra) } + for dir in discoverNodeManagerBinDirs() where !parts.contains(dir) { + parts.append(dir) + } return parts.joined(separator: ":") } + + /// Login-item launches don't source .zshrc, so nvm / fnm / volta / asdf bin + /// directories are absent from PATH. 
Scan common version-manager locations + /// and add the latest Node version's bin dir so `codeburn` can be found. + private static func discoverNodeManagerBinDirs() -> [String] { + let home = FileManager.default.homeDirectoryForCurrentUser.path + let fm = FileManager.default + + // fnm: ~/.local/share/fnm/node-versions//installation/bin + let fnmVersionsDir = "\(home)/.local/share/fnm/node-versions" + if let latest = latestVersionDir(in: fnmVersionsDir) { + let binDir = "\(fnmVersionsDir)/\(latest)/installation/bin" + if fm.fileExists(atPath: "\(binDir)/node") { + return [binDir] + } + } + + // nvm: ~/.nvm/versions/node//bin + let nvmVersionsDir = "\(home)/.nvm/versions/node" + if let latest = latestVersionDir(in: nvmVersionsDir) { + let binDir = "\(nvmVersionsDir)/\(latest)/bin" + if fm.fileExists(atPath: "\(binDir)/node") { + return [binDir] + } + } + + // volta: ~/.volta/bin (flat, no version dirs) + let voltaBin = "\(home)/.volta/bin" + if fm.fileExists(atPath: "\(voltaBin)/node") { + return [voltaBin] + } + + // asdf: ~/.asdf/shims (flat shim dir) + let asdfShims = "\(home)/.asdf/shims" + if fm.fileExists(atPath: "\(asdfShims)/node") { + return [asdfShims] + } + + return [] + } + + /// Returns the latest version directory name (e.g. "v22.15.0") from a + /// parent directory containing version-named subdirectories. + private static func latestVersionDir(in parent: String) -> String? { + let fm = FileManager.default + var isDir: ObjCBool = false + guard fm.fileExists(atPath: parent, isDirectory: &isDir), isDir.boolValue, + let entries = try? 
fm.contentsOfDirectory(atPath: parent) else { + return nil + } + return entries + .filter { $0.hasPrefix("v") } + .sorted() + .last + } } diff --git a/scripts/diagnose-menubar-cli.sh b/scripts/diagnose-menubar-cli.sh new file mode 100755 index 00000000..8e171e6d --- /dev/null +++ b/scripts/diagnose-menubar-cli.sh @@ -0,0 +1,73 @@ +#!/bin/bash +# Replicates the menubar's restricted PATH environment to test if the CLI +# can find and run codeburn with the same PATH the menubar provides. +# +# The menubar augments PATH with: /opt/homebrew/bin /usr/local/bin +# The base PATH for a Login Item is typically: /usr/bin:/bin:/usr/sbin:/sbin + +set -euo pipefail + +RESTRICTED_PATH="/usr/bin:/bin:/usr/sbin:/sbin:/opt/homebrew/bin:/usr/local/bin" + +echo "=== Menubar PATH Diagnostic ===" +echo "" +echo "Using restricted PATH: $RESTRICTED_PATH" +echo "" + +# 1. Check if codeburn is found +echo "--- Step 1: Locate codeburn binary ---" +FOUND=$(PATH="$RESTRICTED_PATH" /usr/bin/env which codeburn 2>&1 || true) +if [ -z "$FOUND" ]; then + echo "FAIL: codeburn not found in restricted PATH" + echo "" + echo "Where codeburn actually is:" + /usr/bin/env which -a codeburn 2>/dev/null || echo "(not found anywhere)" + echo "" + echo "Fix: codeburn is installed outside the menubar's PATH. Options:" + echo " 1. Add the install directory to CodeburnCLI.additionalPathEntries" + echo " 2. Symlink codeburn into /usr/local/bin" + exit 1 +fi +echo "OK: codeburn found at: $FOUND" +echo "" + +# 2. Check if node is found (needed for codeburn shell wrapper) +echo "--- Step 2: Locate node binary ---" +NODE_FOUND=$(PATH="$RESTRICTED_PATH" /usr/bin/env which node 2>&1 || true) +if [ -z "$NODE_FOUND" ]; then + echo "WARNING: node not found in restricted PATH" + echo "This may cause codeburn to fail if it's a shell wrapper." + echo "" +else + echo "OK: node found at: $NODE_FOUND" + echo "Node version: $(PATH="$RESTRICTED_PATH" node --version 2>&1 || echo 'failed')" +fi +echo "" + +# 3. 
Run the command the menubar spawns +echo "--- Step 3: Run menubar-equivalent CLI command ---" +echo "Command: codeburn status --format menubar-json --period today --provider all" +echo "" + +STDERR_FILE=$(mktemp) +trap 'rm -f "$STDERR_FILE"' EXIT + +if PATH="$RESTRICTED_PATH" /usr/bin/env -- codeburn status --format menubar-json --period today --provider all 2>"$STDERR_FILE"; then + echo "" + if [ -s "$STDERR_FILE" ]; then + echo "Warnings/errors on stderr:" + cat "$STDERR_FILE" + fi + echo "" + echo "SUCCESS: CLI ran successfully with restricted PATH." +else + EXIT_CODE=$? + echo "" + echo "FAIL: CLI exited with code $EXIT_CODE" + if [ -s "$STDERR_FILE" ]; then + echo "" + echo "Stderr output:" + cat "$STDERR_FILE" + fi + exit 1 +fi diff --git a/src/cli.ts b/src/cli.ts index abc2b0bc..3efb62ce 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -32,7 +32,10 @@ async function hydrateCache() { (range) => parseAllSessions(range, 'all'), aggregateProjectsIntoDays, ) - } catch { + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + const stack = err instanceof Error && err.stack ? `\n${err.stack}` : '' + process.stderr.write(`codeburn: hydrateCache failed, returning empty cache: ${message}${stack}\n`) return emptyCache() } }