diff --git a/AGENTS.md b/AGENTS.md index 2a8cbc6..e03eec6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,6 +1,6 @@ # AgentTap - Development Guide -> **IMPORTANT**: Do NOT update this file unless the user explicitly says to. +> **IMPORTANT**: Update this file with key learnings as they are discovered during development. Keep entries brief and actionable. ## Project Overview @@ -86,4 +86,43 @@ groq: api.groq.com - All proxy operations must handle TLS errors gracefully - Traces must be queryable by provider, model, timestamp, and source app +## Rollback & Restore + +**Kill proxy:** `pkill -f AgentTap` or `lsof -ti:8443 | xargs kill -9` +**Remove CA:** `rm -rf ~/Library/Application\ Support/AgentTap/ca/` +**Remove CA from system keychain:** `security delete-certificate -c "AgentTap CA" /Library/Keychains/System.keychain` +**Remove pf rules:** `sudo pfctl -f /etc/pf.conf` (restores defaults) +**Verify no lingering listeners:** `lsof -i :8443 -i :18443` +**Uninstall app:** `rm -rf ~/Library/Application\ Support/AgentTap/` + +## Key Learnings + +### Bun TLS Limitations (CRITICAL) + +Bun's `node:tls` has significant limitations for MITM proxy use cases: + +- **`tls.TLSSocket` wrapping with `isServer: true`**: data events never fire on the wrapped socket +- **`pipe()` between TLS sockets**: drops the TLS ClientHello — data never arrives +- **`SNICallback` on `tls.createServer`**: handshake completes but cleartext data events don't fire on piped connections + +**Working pattern**: Per-domain `tls.createServer` with **static certs** (no SNICallback) + manual ClientHello capture via `once("data")` after sending CONNECT 200 + `net.connect` to loopback TLS server with explicit `write()` forwarding (not pipe). See `proxy-server.ts`. + +### HTTP Response Completion + +HTTP/1.1 keep-alive means upstream connections don't close after sending a response. Never rely on `end`/`close` events for response completion. 
Instead detect completion by: +- **Content-Length**: track body bytes received vs header value +- **Chunked**: scan for `0\r\n\r\n` terminator +- **SSE**: finalize only on connection close (server closes when stream ends) + +### Dependencies + +- `@peculiar/x509` requires `reflect-metadata` polyfill (`import "reflect-metadata"` at top of cert-generator.ts) — tsyringe dependency +- Use `globalThis.crypto` (Bun built-in WebCrypto), NOT `@peculiar/webcrypto` +- `bun-types` has known `Buffer`/`ArrayBufferLike` type mismatches with strict TS — safe to ignore, works at runtime + +### ElectroBun + +- Tray click handlers are sync — use `.then()/.catch()` for async operations, not `await` +- `exitOnLastWindowClosed: false` required for tray-only apps + *Generated by [LynxPrompt](https://lynxprompt.com) CLI* diff --git a/bun.lock b/bun.lock index 7f4c964..d7f9152 100644 --- a/bun.lock +++ b/bun.lock @@ -4,6 +4,10 @@ "workspaces": { "": { "name": "agenttap", + "dependencies": { + "@peculiar/x509": "^2.0.0", + "reflect-metadata": "^0.2.2", + }, "devDependencies": { "electrobun": "^1.16.0", "typescript": "^5.8.3", @@ -15,6 +19,28 @@ "@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@1.1.1", "", { "dependencies": { "cross-spawn": "^7.0.1" } }, "sha512-RTBGWL5FWQcg9orDOCcp4LvItNzUPcyEU9bwaeJX0rJ1IQxzucC48Y0/sQLp/g6t99IQgAlGIaesJS+gTn7tVQ=="], + "@peculiar/asn1-cms": ["@peculiar/asn1-cms@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "@peculiar/asn1-x509-attr": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-vdG4fBF6Lkirkcl53q6eOdn3XYKt+kJTG59edgRZORlg/3atWWEReRCx5rYE1ZzTTX6vLK5zDMjHh7vbrcXGtw=="], + + "@peculiar/asn1-csr": ["@peculiar/asn1-csr@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-WRWnKfIocHyzFYQTka8O/tXCiBquAPSrRjXbOkHbO4qdmS6loffCEGs+rby6WxxGdJCuunnhS2duHURhjyio6w=="], + + 
"@peculiar/asn1-ecc": ["@peculiar/asn1-ecc@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-+Vqw8WFxrtDIN5ehUdvlN2m73exS2JVG0UAyfVB31gIfor3zWEAQPD+K9ydCxaj3MLen9k0JhKpu9LqviuCE1g=="], + + "@peculiar/asn1-pfx": ["@peculiar/asn1-pfx@2.6.1", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.1", "@peculiar/asn1-pkcs8": "^2.6.1", "@peculiar/asn1-rsa": "^2.6.1", "@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-nB5jVQy3MAAWvq0KY0R2JUZG8bO/bTLpnwyOzXyEh/e54ynGTatAR+csOnXkkVD9AFZ2uL8Z7EV918+qB1qDvw=="], + + "@peculiar/asn1-pkcs8": ["@peculiar/asn1-pkcs8@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-JB5iQ9Izn5yGMw3ZG4Nw3Xn/hb/G38GYF3lf7WmJb8JZUydhVGEjK/ZlFSWhnlB7K/4oqEs8HnfFIKklhR58Tw=="], + + "@peculiar/asn1-pkcs9": ["@peculiar/asn1-pkcs9@2.6.1", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.1", "@peculiar/asn1-pfx": "^2.6.1", "@peculiar/asn1-pkcs8": "^2.6.1", "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "@peculiar/asn1-x509-attr": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-5EV8nZoMSxeWmcxWmmcolg22ojZRgJg+Y9MX2fnE2bGRo5KQLqV5IL9kdSQDZxlHz95tHvIq9F//bvL1OeNILw=="], + + "@peculiar/asn1-rsa": ["@peculiar/asn1-rsa@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-1nVMEh46SElUt5CB3RUTV4EG/z7iYc7EoaDY5ECwganibQPkZ/Y2eMsTKB/LeyrUJ+W/tKoD9WUqIy8vB+CEdA=="], + + "@peculiar/asn1-schema": ["@peculiar/asn1-schema@2.6.0", "", { "dependencies": { "asn1js": "^3.0.6", "pvtsutils": "^1.3.6", "tslib": "^2.8.1" } }, "sha512-xNLYLBFTBKkCzEZIw842BxytQQATQv+lDTCEMZ8C196iJcJJMBUZxrhSTxLaohMyKK8QlzRNTRkUmanucnDSqg=="], + + "@peculiar/asn1-x509": ["@peculiar/asn1-x509@2.6.1", "", { "dependencies": { 
"@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "pvtsutils": "^1.3.6", "tslib": "^2.8.1" } }, "sha512-O9jT5F1A2+t3r7C4VT7LYGXqkGLK7Kj1xFpz7U0isPrubwU5PbDoyYtx6MiGst29yq7pXN5vZbQFKRCP+lLZlA=="], + + "@peculiar/asn1-x509-attr": ["@peculiar/asn1-x509-attr@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-tlW6cxoHwgcQghnJwv3YS+9OO1737zgPogZ+CgWRUK4roEwIPzRH4JEiG770xe5HX2ATfCpmX60gurfWIF9dcQ=="], + + "@peculiar/x509": ["@peculiar/x509@2.0.0", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.0", "@peculiar/asn1-csr": "^2.6.0", "@peculiar/asn1-ecc": "^2.6.0", "@peculiar/asn1-pkcs9": "^2.6.0", "@peculiar/asn1-rsa": "^2.6.0", "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.0", "pvtsutils": "^1.3.6", "tslib": "^2.8.1", "tsyringe": "^4.10.0" } }, "sha512-r10lkuy6BNfRmyYdRAfgu6dq0HOmyIV2OLhXWE3gDEPBdX1b8miztJVyX/UxWhLwemNyDP3CLZHpDxDwSY0xaA=="], + "@tootallnate/quickjs-emscripten": ["@tootallnate/quickjs-emscripten@0.23.0", "", {}, "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA=="], "@types/bun": ["@types/bun@1.3.12", "", { "dependencies": { "bun-types": "1.3.12" } }, "sha512-DBv81elK+/VSwXHDlnH3Qduw+KxkTIWi7TXkAeh24zpi5l0B2kUg9Ga3tb4nJaPcOFswflgi/yAvMVBPrxMB+A=="], @@ -23,6 +49,8 @@ "agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="], + "asn1js": ["asn1js@3.0.7", "", { "dependencies": { "pvtsutils": "^1.3.6", "pvutils": "^1.1.3", "tslib": "^2.8.1" } }, "sha512-uLvq6KJu04qoQM6gvBfKFjlh6Gl0vOKQuR5cJMDHQkmwfMOQeN3F3SHCv9SNYSL+CRoHvOGFfllDlVz03GQjvQ=="], + "ast-types": ["ast-types@0.13.4", "", { "dependencies": { "tslib": "^2.0.1" } }, "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w=="], "basic-ftp": ["basic-ftp@5.2.2", "", {}, 
"sha512-1tDrzKsdCg70WGvbFss/ulVAxupNauGnOlgpyjKzeQxzyllBLS0CGLV7tjIXTK3ZQA9/FBEm9qyFFN1bciA6pw=="], @@ -85,8 +113,14 @@ "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], + "pvtsutils": ["pvtsutils@1.3.6", "", { "dependencies": { "tslib": "^2.8.1" } }, "sha512-PLgQXQ6H2FWCaeRak8vvk1GW462lMxB5s3Jm673N82zI4vqtVUPuZdffdZbPDFRoU8kAhItWFtPCWiPpp4/EDg=="], + + "pvutils": ["pvutils@1.1.5", "", {}, "sha512-KTqnxsgGiQ6ZAzZCVlJH5eOjSnvlyEgx1m8bkRJfOhmGRqfo5KLvmAlACQkrjEtOQ4B7wF9TdSLIs9O90MX9xA=="], + "rcedit": ["rcedit@4.0.1", "", { "dependencies": { "cross-spawn-windows-exe": "^1.1.0" } }, "sha512-bZdaQi34krFWhrDn+O53ccBDw0MkAT2Vhu75SqhtvhQu4OPyFM4RoVheyYiVQYdjhUi6EJMVWQ0tR6bCIYVkUg=="], + "reflect-metadata": ["reflect-metadata@0.2.2", "", {}, "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="], + "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], @@ -103,8 +137,12 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "tsyringe": ["tsyringe@4.10.0", "", { "dependencies": { "tslib": "^1.9.3" } }, "sha512-axr3IdNuVIxnaK5XGEUFTu3YmAQ6lllgrvqfEoR16g/HGnYY/6We4oWENtAnzK6/LpJ2ur9PAb80RBt7/U4ugw=="], + "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + + "tsyringe/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], } } diff --git a/electrobun.config.ts b/electrobun.config.ts index cadee29..fc8535a 100644 --- a/electrobun.config.ts +++ b/electrobun.config.ts @@ -31,6 +31,8 @@ export default { "src/views/mainview/index.css": "views/mainview/index.css", "src/views/mainview/assets/tray-icon-template.png": "views/assets/tray-icon-template.png", + "src/views/mainview/assets/tray-icon-template@2x.png": + "views/assets/tray-icon-template@2x.png", }, useAsar: false, diff --git a/package.json b/package.json index a30b4e8..478f20b 100644 --- a/package.json +++ b/package.json @@ -18,5 +18,9 @@ "devDependencies": { "electrobun": "^1.16.0", "typescript": "^5.8.3" + }, + "dependencies": { + "@peculiar/x509": "^2.0.0", + "reflect-metadata": "^0.2.2" } } diff --git a/src/bun/ca/ca-manager.ts b/src/bun/ca/ca-manager.ts new file mode 100644 index 0000000..66bc287 --- /dev/null +++ b/src/bun/ca/ca-manager.ts @@ -0,0 +1,83 @@ +import { existsSync } from "node:fs"; +import { mkdir } from "node:fs/promises"; +import { join } from "node:path"; +import { generateRootCA, type KeyCertPair } from "./cert-generator"; + +export type CAStatus = + | "not-generated" + | "generated" + | "installed" + | "expired"; + +const CA_DIR = join( + process.env.HOME ?? 
"~", + "Library", + "Application Support", + "AgentTap", + "ca", +); +const CA_KEY_PATH = join(CA_DIR, "ca-key.pem"); +const CA_CERT_PATH = join(CA_DIR, "ca.pem"); + +let loaded: KeyCertPair | null = null; + +export function getCAPaths() { + return { keyPath: CA_KEY_PATH, certPath: CA_CERT_PATH, dir: CA_DIR }; +} + +export async function ensureCA(): Promise { + if (loaded) return loaded; + + if (existsSync(CA_KEY_PATH) && existsSync(CA_CERT_PATH)) { + const key = await Bun.file(CA_KEY_PATH).text(); + const cert = await Bun.file(CA_CERT_PATH).text(); + + if (isCertExpired(cert)) { + console.log("[CA] Certificate expired, regenerating..."); + return regenerateCA(); + } + + loaded = { key, cert }; + console.log("[CA] Loaded existing CA from", CA_DIR); + return loaded; + } + + return regenerateCA(); +} + +export async function regenerateCA(): Promise { + await mkdir(CA_DIR, { recursive: true }); + + console.log("[CA] Generating new root CA..."); + const pair = await generateRootCA(); + + await Bun.write(CA_KEY_PATH, pair.key, { mode: 0o600 }); + await Bun.write(CA_CERT_PATH, pair.cert); + + loaded = pair; + console.log("[CA] Root CA written to", CA_DIR); + return loaded; +} + +export function getCA(): KeyCertPair | null { + return loaded; +} + +export function getCAStatus(): CAStatus { + if (!loaded) { + if (!existsSync(CA_CERT_PATH)) return "not-generated"; + } + if (loaded && isCertExpired(loaded.cert)) return "expired"; + if (loaded) return "generated"; + return "not-generated"; +} + +function isCertExpired(pem: string): boolean { + try { + const crypto = require("node:crypto"); + const cert = new crypto.X509Certificate(pem); + return new Date(cert.validTo) < new Date(); + } catch { + return false; + } +} diff --git a/src/bun/ca/cert-cache.ts b/src/bun/ca/cert-cache.ts new file mode 100644 index 0000000..d00bbfe --- /dev/null +++ b/src/bun/ca/cert-cache.ts @@ -0,0 +1,56 @@ +import type { KeyCertPair } from "./cert-generator"; +import { generateLeafCert } from 
"./cert-generator"; + +const MAX_CACHE_SIZE = 100; + +interface CacheEntry { + pair: KeyCertPair; + expiresAt: number; + lastUsed: number; +} + +const cache = new Map(); + +export async function getOrGenerateLeafCert( + domain: string, + caCert: string, + caKey: string, +): Promise { + const existing = cache.get(domain); + const now = Date.now(); + + if (existing && existing.expiresAt > now) { + existing.lastUsed = now; + return existing.pair; + } + + const pair = await generateLeafCert(domain, caCert, caKey); + + if (cache.size >= MAX_CACHE_SIZE) { + evictLRU(); + } + + cache.set(domain, { + pair, + expiresAt: now + 25 * 86400_000, // 25 days (leaf certs valid 30) + lastUsed: now, + }); + + return pair; +} + +export function clearCache(): void { + cache.clear(); +} + +function evictLRU(): void { + let oldest: string | null = null; + let oldestTime = Infinity; + for (const [domain, entry] of cache) { + if (entry.lastUsed < oldestTime) { + oldestTime = entry.lastUsed; + oldest = domain; + } + } + if (oldest) cache.delete(oldest); +} diff --git a/src/bun/ca/cert-generator.ts b/src/bun/ca/cert-generator.ts new file mode 100644 index 0000000..9a79c44 --- /dev/null +++ b/src/bun/ca/cert-generator.ts @@ -0,0 +1,122 @@ +import "reflect-metadata"; +import * as x509 from "@peculiar/x509"; + +const crypto = globalThis.crypto; +x509.cryptoProvider.set(crypto as unknown as Crypto); + +const CA_VALIDITY_YEARS = 1; +const LEAF_VALIDITY_DAYS = 30; + +export interface KeyCertPair { + key: string; + cert: string; +} + +async function exportKeyPem(key: CryptoKey): Promise { + const exported = await crypto.subtle.exportKey("pkcs8", key); + const b64 = Buffer.from(exported).toString("base64"); + const lines = b64.match(/.{1,64}/g) ?? 
[]; + return `-----BEGIN PRIVATE KEY-----\n${lines.join("\n")}\n-----END PRIVATE KEY-----\n`; +} + +export async function generateRootCA(): Promise { + const alg: RsaHashedKeyGenParams = { + name: "RSASSA-PKCS1-v1_5", + hash: "SHA-256", + modulusLength: 4096, + publicExponent: new Uint8Array([1, 0, 1]), + }; + + const keys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]); + + const cert = await x509.X509CertificateGenerator.createSelfSigned({ + serialNumber: randomSerial(), + name: "CN=AgentTap CA, O=AgentTap, C=US", + notBefore: new Date(), + notAfter: yearFromNow(CA_VALIDITY_YEARS), + keys, + signingAlgorithm: alg, + extensions: [ + new x509.BasicConstraintsExtension(true, undefined, true), + new x509.KeyUsagesExtension( + x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, + true, + ), + await x509.SubjectKeyIdentifierExtension.create(keys.publicKey), + ], + }); + + return { + key: await exportKeyPem(keys.privateKey), + cert: cert.toString("pem"), + }; +} + +export async function generateLeafCert( + domain: string, + caCert: string, + caKey: string, +): Promise { + const ecAlg: EcKeyGenParams = { name: "ECDSA", namedCurve: "P-256" }; + const leafKeys = await crypto.subtle.generateKey(ecAlg, true, [ + "sign", + "verify", + ]); + + const caKeyImported = await crypto.subtle.importKey( + "pkcs8", + pemToDer(caKey), + { name: "RSASSA-PKCS1-v1_5", hash: "SHA-256" }, + false, + ["sign"], + ); + + const caCertParsed = new x509.X509Certificate(caCert); + const now = new Date(); + const notAfter = new Date(now.getTime() + LEAF_VALIDITY_DAYS * 86400_000); + + const cert = await x509.X509CertificateGenerator.create({ + serialNumber: randomSerial(), + subject: `CN=${domain}`, + issuer: caCertParsed.subject, + notBefore: now, + notAfter, + signingKey: caKeyImported, + publicKey: leafKeys.publicKey, + signingAlgorithm: { name: "RSASSA-PKCS1-v1_5" }, + extensions: [ + new x509.BasicConstraintsExtension(false), + new 
x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature, true), + new x509.ExtendedKeyUsageExtension(["1.3.6.1.5.5.7.3.1"]), // serverAuth + new x509.SubjectAlternativeNameExtension([ + { type: "dns", value: domain }, + ]), + ], + }); + + return { + key: await exportKeyPem(leafKeys.privateKey), + cert: cert.toString("pem"), + }; +} + +function randomSerial(): string { + const bytes = crypto.getRandomValues(new Uint8Array(16)); + return Array.from(bytes) + .map((b) => b.toString(16).padStart(2, "0")) + .join(""); +} + +function yearFromNow(years: number): Date { + const d = new Date(); + d.setFullYear(d.getFullYear() + years); + return d; +} + +function pemToDer(pem: string): ArrayBuffer { + const b64 = pem + .replace(/-----BEGIN [\w\s]+-----/, "") + .replace(/-----END [\w\s]+-----/, "") + .replace(/\s/g, ""); + return Buffer.from(b64, "base64").buffer; +} diff --git a/src/bun/capture/trace-emitter.ts b/src/bun/capture/trace-emitter.ts new file mode 100644 index 0000000..a89f3b0 --- /dev/null +++ b/src/bun/capture/trace-emitter.ts @@ -0,0 +1,16 @@ +import { EventEmitter } from "node:events"; +import type { CapturedTrace } from "./types"; + +class TraceEmitter extends EventEmitter { + emit(event: "trace", trace: CapturedTrace): boolean; + emit(event: string, ...args: unknown[]): boolean { + return super.emit(event, ...args); + } + + on(event: "trace", listener: (trace: CapturedTrace) => void): this; + on(event: string, listener: (...args: unknown[]) => void): this { + return super.on(event, listener); + } +} + +export const traceEmitter = new TraceEmitter(); diff --git a/src/bun/capture/types.ts b/src/bun/capture/types.ts new file mode 100644 index 0000000..39a163a --- /dev/null +++ b/src/bun/capture/types.ts @@ -0,0 +1,35 @@ +export interface CapturedRequest { + timestamp: number; + method: string; + url: string; + headers: Record; + body: string | null; +} + +export interface CapturedResponse { + status: number; + statusText: string; + headers: Record; + body: 
string | null; + chunks?: SSEChunk[]; + isStreaming: boolean; +} + +export interface SSEChunk { + timestamp: number; + event?: string; + data: string; + id?: string; +} + +export interface CapturedTrace { + id: string; + provider: string; + domain: string; + model?: string; + request: CapturedRequest; + response: CapturedResponse; + durationMs: number; + tokensIn?: number; + tokensOut?: number; +} diff --git a/src/bun/index.ts b/src/bun/index.ts index 90a2aa5..a520ade 100644 --- a/src/bun/index.ts +++ b/src/bun/index.ts @@ -1,11 +1,16 @@ import { Tray } from "electrobun/bun"; import { type AppState, type CaptureStatus, DEFAULT_PROVIDERS, TrayActions } from "../shared/types"; import { NativeBridge } from "./native-bridge"; +import { ensureCA, getCAStatus, getCAPaths } from "./ca/ca-manager"; +import { startProxy, stopProxy, isProxyRunning } from "./proxy/proxy-server"; +import { traceEmitter } from "./capture/trace-emitter"; // ── App State ────────────────────────────────────────────────────────────── const state: AppState = { captureStatus: "inactive", + caStatus: "not-generated", + proxyStatus: "stopped", providers: DEFAULT_PROVIDERS.map(p => ({ ...p })), proxyPort: 8443, tracesCount: 0, @@ -14,7 +19,7 @@ const state: AppState = { // ── System Tray ──────────────────────────────────────────────────────────── const tray = new Tray({ - title: "AgentTap", + title: "", image: "views://assets/tray-icon-template.png", template: true, width: 22, @@ -67,6 +72,18 @@ function buildTrayMenu() { action: "status", enabled: false, }, + { + type: "normal", + label: `CA: ${state.caStatus}`, + action: "ca-status", + enabled: false, + }, + { + type: "normal", + label: `Proxy: 127.0.0.1:${state.proxyPort}`, + action: "proxy-port", + enabled: false, + }, { type: "normal", label: `Traces: ${state.tracesCount}`, @@ -96,14 +113,25 @@ tray.on("tray-clicked", (e) => { if (action === TrayActions.TOGGLE_CAPTURE) { if (state.captureStatus === "active") { + stopProxy(); state.captureStatus = 
"inactive"; + state.proxyStatus = "stopped"; console.log("[AgentTap] Capture stopped"); + buildTrayMenu(); } else { - state.captureStatus = "active"; - console.log("[AgentTap] Capture started"); + state.proxyStatus = "starting"; + startProxy(state.proxyPort).then(() => { + state.captureStatus = "active"; + state.proxyStatus = "running"; + console.log("[AgentTap] Capture started on port", state.proxyPort); + buildTrayMenu(); + }).catch((err: unknown) => { + state.captureStatus = "error"; + state.proxyStatus = "error"; + console.error("[AgentTap] Failed to start proxy:", err); + buildTrayMenu(); + }); } - tray.setTitle(state.captureStatus === "active" ? "AgentTap (ON)" : "AgentTap"); - buildTrayMenu(); return; } @@ -126,6 +154,7 @@ tray.on("tray-clicked", (e) => { if (action === TrayActions.QUIT) { console.log("[AgentTap] Shutting down..."); + stopProxy(); process.exit(0); } }); @@ -133,7 +162,7 @@ tray.on("tray-clicked", (e) => { // ── Startup ──────────────────────────────────────────────────────────────── async function main() { - console.log("[AgentTap] Starting v0.1.0..."); + console.log("[AgentTap] Starting v0.2.0..."); // Test FFI bridge const bridge = new NativeBridge(); @@ -144,6 +173,29 @@ async function main() { console.log("[AgentTap] Native bridge not available (running without native features)"); } + // Initialize CA + try { + await ensureCA(); + state.caStatus = getCAStatus(); + const { certPath } = getCAPaths(); + console.log(`[AgentTap] CA ready (${state.caStatus}). Cert: ${certPath}`); + } catch (err) { + console.error("[AgentTap] CA initialization failed:", err); + state.caStatus = "not-generated"; + } + + // Listen for captured traces + traceEmitter.on("trace", (trace) => { + state.tracesCount++; + const tokens = trace.tokensIn || trace.tokensOut + ? ` (${trace.tokensIn ?? "?"}→${trace.tokensOut ?? "?"} tokens)` + : ""; + console.log( + `[Trace] ${trace.provider} ${trace.model ?? 
"unknown"} ${trace.request.method} ${trace.request.url} → ${trace.response.status} ${trace.durationMs}ms${tokens}`, + ); + buildTrayMenu(); + }); + // Build initial tray menu buildTrayMenu(); diff --git a/src/bun/proxy/http-parser.ts b/src/bun/proxy/http-parser.ts new file mode 100644 index 0000000..ec91d9c --- /dev/null +++ b/src/bun/proxy/http-parser.ts @@ -0,0 +1,120 @@ +/** + * Minimal HTTP/1.1 parser for proxy request/response extraction. + */ + +export interface ParsedRequest { + method: string; + path: string; + httpVersion: string; + headers: Record; + headerEndIndex: number; +} + +export interface ParsedResponse { + status: number; + statusText: string; + httpVersion: string; + headers: Record; + headerEndIndex: number; +} + +const HEADER_END = Buffer.from("\r\n\r\n"); + +export function findHeaderEnd(buf: Buffer): number { + const idx = buf.indexOf(HEADER_END); + return idx === -1 ? -1 : idx + 4; +} + +export function parseRequestHead(buf: Buffer): ParsedRequest | null { + const endIdx = findHeaderEnd(buf); + if (endIdx === -1) return null; + + const headStr = buf.subarray(0, endIdx).toString("latin1"); + const lines = headStr.split("\r\n"); + const requestLine = lines[0]; + if (!requestLine) return null; + + const parts = requestLine.split(" "); + if (parts.length < 3) return null; + + const headers: Record = {}; + for (let i = 1; i < lines.length; i++) { + const line = lines[i]; + if (!line) continue; + const colonIdx = line.indexOf(":"); + if (colonIdx === -1) continue; + const key = line.substring(0, colonIdx).trim().toLowerCase(); + const value = line.substring(colonIdx + 1).trim(); + headers[key] = value; + } + + return { + method: parts[0], + path: parts[1], + httpVersion: parts[2], + headers, + headerEndIndex: endIdx, + }; +} + +export function parseResponseHead(buf: Buffer): ParsedResponse | null { + const endIdx = findHeaderEnd(buf); + if (endIdx === -1) return null; + + const headStr = buf.subarray(0, endIdx).toString("latin1"); + const lines = 
headStr.split("\r\n"); + const statusLine = lines[0]; + if (!statusLine) return null; + + const firstSpace = statusLine.indexOf(" "); + const secondSpace = statusLine.indexOf(" ", firstSpace + 1); + if (firstSpace === -1) return null; + + const httpVersion = statusLine.substring(0, firstSpace); + const status = parseInt(statusLine.substring(firstSpace + 1, secondSpace === -1 ? undefined : secondSpace), 10); + const statusText = secondSpace === -1 ? "" : statusLine.substring(secondSpace + 1); + + const headers: Record = {}; + for (let i = 1; i < lines.length; i++) { + const line = lines[i]; + if (!line) continue; + const colonIdx = line.indexOf(":"); + if (colonIdx === -1) continue; + const key = line.substring(0, colonIdx).trim().toLowerCase(); + const value = line.substring(colonIdx + 1).trim(); + headers[key] = value; + } + + return { + status, + statusText, + httpVersion, + headers, + headerEndIndex: endIdx, + }; +} + +/** + * Decode a chunked transfer-encoded body. + */ +export function dechunk(buf: Buffer): Buffer { + const chunks: Buffer[] = []; + let offset = 0; + + while (offset < buf.length) { + const lineEnd = buf.indexOf(Buffer.from("\r\n"), offset); + if (lineEnd === -1) break; + + const sizeStr = buf.subarray(offset, lineEnd).toString("ascii").split(";")[0]; + const chunkSize = parseInt(sizeStr, 16); + if (isNaN(chunkSize) || chunkSize === 0) break; + + offset = lineEnd + 2; + if (offset + chunkSize > buf.length) break; + + chunks.push(buf.subarray(offset, offset + chunkSize)); + offset += chunkSize + 2; // skip chunk data + \r\n + } + + return Buffer.concat(chunks); +} diff --git a/src/bun/proxy/proxy-server.ts b/src/bun/proxy/proxy-server.ts new file mode 100644 index 0000000..af5622a --- /dev/null +++ b/src/bun/proxy/proxy-server.ts @@ -0,0 +1,308 @@ +import * as tls from "node:tls"; +import * as net from "node:net"; +import { randomUUID } from "node:crypto"; +import { getOrGenerateLeafCert } from "../ca/cert-cache"; +import { getCA } from 
"../ca/ca-manager"; +import { parseRequestHead, parseResponseHead, dechunk } from "./http-parser"; +import { parseSSEStream, reassembleSSE } from "../sse/sse-reassembler"; +import { traceEmitter } from "../capture/trace-emitter"; +import type { CapturedRequest, CapturedResponse, CapturedTrace } from "../capture/types"; +import { DEFAULT_PROVIDERS } from "../../shared/types"; + +const DEFAULT_PORT = 8443; +const UPSTREAM_TIMEOUT = 30_000; + +let connectProxy: net.Server | null = null; + +// Per-domain TLS terminators (static cert per domain, avoids Bun SNICallback issues) +const domainServers = new Map(); + +export function isProxyRunning(): boolean { + return connectProxy !== null; +} + +async function getOrCreateDomainTLS(domain: string): Promise { + const existing = domainServers.get(domain); + if (existing) return existing.port; + + const ca = getCA()!; + const leaf = await getOrGenerateLeafCert(domain, ca.cert, ca.key); + + const server = tls.createServer({ key: leaf.key, cert: leaf.cert }, (socket) => { + handleDecryptedConnection(socket, domain); + }); + + server.on("tlsClientError", () => {}); + server.on("error", () => {}); + + const port = await new Promise((resolve) => { + server.listen(0, "127.0.0.1", () => { + const addr = server.address() as net.AddressInfo; + resolve(addr.port); + }); + }); + + domainServers.set(domain, { server, port }); + return port; +} + +export async function startProxy(port = DEFAULT_PORT): Promise { + const ca = getCA(); + if (!ca) throw new Error("CA not initialized — call ensureCA() first"); + + connectProxy = net.createServer((clientSocket) => { + clientSocket.on("error", () => {}); + + clientSocket.once("data", async (chunk) => { + const headStr = chunk.toString("latin1"); + + if (headStr.startsWith("CONNECT ")) { + const target = headStr.split("\r\n")[0].split(" ")[1]; + const domain = target.split(":")[0]; + + // Get or create a per-domain TLS terminator + const tlsPort = await getOrCreateDomainTLS(domain); + + // Send 
200 to establish tunnel
        clientSocket.write("HTTP/1.1 200 Connection Established\r\n\r\n");

        // Wait for the TLS ClientHello, then bridge it to the per-domain
        // loopback TLS server. NOTE(review): client bytes arriving between
        // the ClientHello and the connect callback would be dropped; TLS
        // clients wait for the ServerHello, so this is safe in practice.
        clientSocket.once("data", (clientHello) => {
          const local = net.connect({ host: "127.0.0.1", port: tlsPort }, () => {
            // Forward the captured ClientHello first.
            local.write(clientHello);

            // Bidirectional relay for the rest of the TLS handshake + data.
            // Explicit write() forwarding — pipe() drops the data under Bun.
            clientSocket.on("data", (c) => local.write(c));
            local.on("data", (c) => clientSocket.write(c));
            local.on("end", () => clientSocket.end());
            clientSocket.on("end", () => local.end());
          });

          local.on("error", () => clientSocket.destroy());
          clientSocket.on("error", () => local.destroy());
        });
      } else {
        clientSocket.end("HTTP/1.1 400 Bad Request\r\n\r\nOnly CONNECT supported\r\n");
      }
    });
  });

  connectProxy.on("error", (err) => {
    console.error("[Proxy] Error:", err.message);
  });

  // Resolve only once the proxy socket is actually listening.
  await new Promise((resolve) => {
    connectProxy!.listen(port, "127.0.0.1", () => {
      console.log(`[Proxy] CONNECT proxy on 127.0.0.1:${port}`);
      resolve();
    });
  });

  console.log("[Proxy] Started");
}

// ── Decrypted HTTP handling ────────────────────────────────────────────── 

/**
 * Handle one decrypted (post-TLS) client connection for `domain`.
 *
 * Buffers incoming data until a complete HTTP request head plus
 * Content-Length body has arrived, captures the request, then hands the
 * raw bytes to relayToUpstream for forwarding.
 *
 * NOTE(review): requests with chunked bodies are forwarded as soon as the
 * head parses (contentLength defaults to 0) — confirm callers never send
 * chunked request bodies.
 */
function handleDecryptedConnection(socket: net.Socket, domain: string): void {
  const requestChunks: Buffer[] = [];
  const startTime = Date.now();

  const onData = (chunk: Buffer) => {
    requestChunks.push(chunk);

    const fullBuf = Buffer.concat(requestChunks);
    const parsed = parseRequestHead(fullBuf);
    if (!parsed) return; // head incomplete — keep buffering

    const contentLength = parseInt(parsed.headers["content-length"] ?? "0", 10);
    const bodyReceived = fullBuf.length - parsed.headerEndIndex;
    if (bodyReceived < contentLength) return; // body incomplete — keep buffering

    // Full request captured; stop accumulating on this listener.
    socket.removeListener("data", onData);

    const requestBody = contentLength > 0
      ? fullBuf.subarray(parsed.headerEndIndex, parsed.headerEndIndex + contentLength).toString("utf-8")
      : null;

    const capturedRequest: CapturedRequest = {
      timestamp: startTime,
      method: parsed.method,
      url: `https://${domain}${parsed.path}`,
      headers: parsed.headers,
      body: requestBody,
    };

    relayToUpstream(socket, domain, fullBuf, capturedRequest, startTime);
  };

  socket.on("data", onData);
  socket.on("error", () => {}); // client resets are routine; ignore
}

/**
 * Forward the captured request to the real origin over TLS, stream the
 * response back to the client verbatim, and emit a trace once the response
 * is complete.
 *
 * Completion detection (per AGENTS.md — keep-alive means no close event):
 *  - Content-Length: count body bytes against the header value
 *  - chunked: scan the body for the `0\r\n\r\n` terminator
 *  - SSE: finalize only on connection close
 */
function relayToUpstream(
  clientSocket: net.Socket,
  domain: string,
  requestBuf: Buffer,
  capturedRequest: CapturedRequest,
  startTime: number,
): void {
  const upstream = tls.connect({
    host: domain,
    port: 443,
    servername: domain, // SNI for the real origin
    timeout: UPSTREAM_TIMEOUT,
  });

  upstream.on("secureConnect", () => {
    upstream.write(requestBuf);
  });

  const responseChunks: Buffer[] = [];
  let headersParsed = false;
  let isSSE = false;
  let isChunked = false;
  let contentLength = -1; // -1 = no Content-Length header
  let headerEndIndex = 0;
  let finalized = false;

  const CHUNKED_TERMINATOR = Buffer.from("0\r\n\r\n");

  upstream.on("data", (chunk: Buffer) => {
    responseChunks.push(chunk);
    clientSocket.write(chunk); // stream through immediately

    if (!headersParsed) {
      const fullBuf = Buffer.concat(responseChunks);
      const parsed = parseResponseHead(fullBuf);
      if (parsed) {
        headersParsed = true;
        headerEndIndex = parsed.headerEndIndex;
        isSSE = (parsed.headers["content-type"] ?? "").includes("text/event-stream");
        isChunked = (parsed.headers["transfer-encoding"] ?? "").includes("chunked");
        contentLength = parseInt(parsed.headers["content-length"] ?? "-1", 10);
      }
    }

    // Check response completeness (skip for SSE — finalize on close)
    if (headersParsed && !isSSE) {
      const totalBytes = responseChunks.reduce((s, c) => s + c.length, 0);
      const bodyBytes = totalBytes - headerEndIndex;

      if (contentLength >= 0 && bodyBytes >= contentLength) {
        finalize();
        upstream.destroy();
      } else if (isChunked) {
        // FIX: dropped the `as unknown as Uint8Array[]` double assertion —
        // Buffer.concat(responseChunks) is used without a cast everywhere
        // else in this function.
        const fullBuf = Buffer.concat(responseChunks);
        const bodyBuf = fullBuf.subarray(headerEndIndex);
        if (bodyBuf.includes(CHUNKED_TERMINATOR)) {
          finalize();
          upstream.destroy();
        }
      }
    }
  });

  /**
   * Parse the buffered response, attach token usage, and emit the trace.
   * Guarded so that the multiple completion paths (length reached, chunk
   * terminator seen, end, close) emit at most once.
   */
  const finalize = () => {
    if (finalized) return;
    finalized = true;

    const fullBuf = Buffer.concat(responseChunks);
    if (fullBuf.length === 0) return;
    const parsed = parseResponseHead(fullBuf);
    if (!parsed) return;

    let bodyBuf = fullBuf.subarray(parsed.headerEndIndex);
    if (isChunked) bodyBuf = dechunk(bodyBuf);
    const bodyStr = bodyBuf.toString("utf-8");

    const capturedResponse: CapturedResponse = {
      status: parsed.status,
      statusText: parsed.statusText,
      headers: parsed.headers,
      body: bodyStr,
      isStreaming: isSSE,
    };

    const trace = buildTrace(domain, capturedRequest, capturedResponse, startTime);

    if (isSSE) {
      // Reassemble the SSE stream to recover the full text + token counts.
      const sseChunks = parseSSEStream(bodyStr);
      const reassembled = reassembleSSE(sseChunks, domain);
      capturedResponse.chunks = sseChunks;
      trace.tokensIn = reassembled.tokensIn;
      trace.tokensOut = reassembled.tokensOut;
    } else {
      extractTokensFromBody(bodyStr, domain, trace);
    }

    traceEmitter.emit("trace", trace);
  };

  upstream.on("end", () => { finalize(); clientSocket.end(); });
  // "close" also fires after "error"/"timeout", so partial responses still
  // get finalized through this handler.
  upstream.on("close", () => { finalize(); clientSocket.destroy(); });
  upstream.on("error", (err) => {
    console.error(`[Proxy] Upstream error for ${domain}:`, err.message);
    clientSocket.end();
  });
  upstream.on("timeout", () => {
    console.error(`[Proxy] Upstream timeout for ${domain}`);
    upstream.destroy();
    clientSocket.end();
  });
}
+// ── Helpers ────────────────────────────────────────────────────────────── + +function buildTrace( + domain: string, + request: CapturedRequest, + response: CapturedResponse, + startTime: number, +): CapturedTrace { + return { + id: randomUUID(), + provider: detectProvider(domain), + domain, + model: extractModel(request.body) ?? undefined, + request, + response, + durationMs: Date.now() - startTime, + }; +} + +function detectProvider(domain: string): string { + for (const p of DEFAULT_PROVIDERS) { + if (p.domains.some((d) => domain.includes(d))) return p.id; + } + return "unknown"; +} + +function extractModel(body: string | null): string | null { + if (!body) return null; + try { return JSON.parse(body).model ?? null; } catch { return null; } +} + +function extractTokensFromBody(body: string, domain: string, trace: CapturedTrace): void { + try { + const json = JSON.parse(body); + if (domain.includes("anthropic")) { + trace.tokensIn = json.usage?.input_tokens; + trace.tokensOut = json.usage?.output_tokens; + } else if (domain.includes("openai")) { + trace.tokensIn = json.usage?.prompt_tokens; + trace.tokensOut = json.usage?.completion_tokens; + } else if (domain.includes("googleapis")) { + trace.tokensIn = json.usageMetadata?.promptTokenCount; + trace.tokensOut = json.usageMetadata?.candidatesTokenCount; + } + } catch {} +} + +export function stopProxy(): void { + connectProxy?.close(); + connectProxy = null; + for (const { server } of domainServers.values()) server.close(); + domainServers.clear(); + console.log("[Proxy] Stopped"); +} diff --git a/src/bun/proxy/sni-parser.ts b/src/bun/proxy/sni-parser.ts new file mode 100644 index 0000000..1dc43f3 --- /dev/null +++ b/src/bun/proxy/sni-parser.ts @@ -0,0 +1,66 @@ +/** + * Extract SNI hostname from a TLS ClientHello message. + * The SNI extension (type 0x0000) sits in plaintext in the first TLS record. 
+ */ +export function extractSNI(data: Buffer): string | null { + // TLS record: type(1) version(2) length(2) + if (data.length < 5 || data[0] !== 0x16) return null; // not a handshake + + let offset = 5; // skip record header + + // Handshake: type(1) length(3) + if (offset >= data.length || data[offset] !== 0x01) return null; // not ClientHello + offset += 4; // skip handshake header + + // ClientHello: version(2) random(32) + offset += 2 + 32; + if (offset >= data.length) return null; + + // Session ID: length(1) + data + const sessionIdLen = data[offset]; + offset += 1 + sessionIdLen; + if (offset >= data.length) return null; + + // Cipher suites: length(2) + data + const cipherSuitesLen = data.readUInt16BE(offset); + offset += 2 + cipherSuitesLen; + if (offset >= data.length) return null; + + // Compression methods: length(1) + data + const compressionLen = data[offset]; + offset += 1 + compressionLen; + if (offset + 2 > data.length) return null; + + // Extensions: total_length(2) + const extensionsLen = data.readUInt16BE(offset); + offset += 2; + const extensionsEnd = offset + extensionsLen; + + while (offset + 4 <= extensionsEnd && offset + 4 <= data.length) { + const extType = data.readUInt16BE(offset); + const extLen = data.readUInt16BE(offset + 2); + offset += 4; + + if (extType === 0x0000) { + // SNI extension + // server_name_list: length(2) + if (offset + 2 > data.length) return null; + offset += 2; // skip list length + + // server_name: type(1) length(2) name(...) 
+ if (offset + 3 > data.length) return null; + const nameType = data[offset]; + const nameLen = data.readUInt16BE(offset + 1); + offset += 3; + + if (nameType === 0x00 && offset + nameLen <= data.length) { + return data.subarray(offset, offset + nameLen).toString("ascii"); + } + return null; + } + + offset += extLen; + } + + return null; +} diff --git a/src/bun/sse/sse-reassembler.ts b/src/bun/sse/sse-reassembler.ts new file mode 100644 index 0000000..4954727 --- /dev/null +++ b/src/bun/sse/sse-reassembler.ts @@ -0,0 +1,130 @@ +import type { SSEChunk } from "../capture/types"; + +export interface ReassembledSSE { + chunks: SSEChunk[]; + fullBody: string; + provider?: string; + tokensIn?: number; + tokensOut?: number; +} + +/** + * Parse raw SSE text into individual chunks. + */ +export function parseSSEStream(raw: string): SSEChunk[] { + const chunks: SSEChunk[] = []; + const blocks = raw.split("\n\n"); + + for (const block of blocks) { + if (!block.trim()) continue; + + let event: string | undefined; + let data = ""; + let id: string | undefined; + + for (const line of block.split("\n")) { + if (line.startsWith("event:")) { + event = line.substring(6).trim(); + } else if (line.startsWith("data:")) { + const val = line.substring(5); + data += (data ? "\n" : "") + (val.startsWith(" ") ? val.substring(1) : val); + } else if (line.startsWith("id:")) { + id = line.substring(3).trim(); + } + } + + if (data) { + chunks.push({ timestamp: Date.now(), event, data, id }); + } + } + + return chunks; +} + +/** + * Reassemble SSE chunks into the full message content and extract token usage. 
+ */ +export function reassembleSSE( + chunks: SSEChunk[], + domain: string, +): ReassembledSSE { + if (domain.includes("anthropic")) { + return reassembleAnthropic(chunks); + } + if (domain.includes("openai")) { + return reassembleOpenAI(chunks); + } + // Generic: just concatenate data fields + return { + chunks, + fullBody: chunks.map((c) => c.data).join("\n"), + }; +} + +function reassembleAnthropic(chunks: SSEChunk[]): ReassembledSSE { + const textParts: string[] = []; + let tokensIn: number | undefined; + let tokensOut: number | undefined; + + for (const chunk of chunks) { + if (chunk.data === "[DONE]") continue; + try { + const json = JSON.parse(chunk.data); + + if (chunk.event === "content_block_delta" && json.delta?.text) { + textParts.push(json.delta.text); + } + + if (chunk.event === "message_delta" && json.usage) { + tokensOut = json.usage.output_tokens; + } + + if (chunk.event === "message_start" && json.message?.usage) { + tokensIn = json.message.usage.input_tokens; + } + } catch { + // non-JSON SSE data, skip + } + } + + return { + chunks, + fullBody: textParts.join(""), + provider: "anthropic", + tokensIn, + tokensOut, + }; +} + +function reassembleOpenAI(chunks: SSEChunk[]): ReassembledSSE { + const textParts: string[] = []; + let tokensIn: number | undefined; + let tokensOut: number | undefined; + + for (const chunk of chunks) { + if (chunk.data === "[DONE]") continue; + try { + const json = JSON.parse(chunk.data); + + const delta = json.choices?.[0]?.delta; + if (delta?.content) { + textParts.push(delta.content); + } + + if (json.usage) { + tokensIn = json.usage.prompt_tokens; + tokensOut = json.usage.completion_tokens; + } + } catch { + // non-JSON SSE data, skip + } + } + + return { + chunks, + fullBody: textParts.join(""), + provider: "openai", + tokensIn, + tokensOut, + }; +} diff --git a/src/shared/types.ts b/src/shared/types.ts index 1c4c1a3..974dadf 100644 --- a/src/shared/types.ts +++ b/src/shared/types.ts @@ -20,9 +20,13 @@ export const 
DEFAULT_PROVIDERS: readonly Provider[] = [ // Capture state export type CaptureStatus = "active" | "inactive" | "error"; +export type CAStatus = "not-generated" | "generated" | "installed" | "expired"; +export type ProxyStatus = "stopped" | "starting" | "running" | "error"; export interface AppState { captureStatus: CaptureStatus; + caStatus: CAStatus; + proxyStatus: ProxyStatus; providers: Provider[]; proxyPort: number; tracesCount: number; diff --git a/src/views/mainview/assets/tray-icon-template.png b/src/views/mainview/assets/tray-icon-template.png index f7d0f2e..d5ad40f 100644 Binary files a/src/views/mainview/assets/tray-icon-template.png and b/src/views/mainview/assets/tray-icon-template.png differ diff --git a/src/views/mainview/assets/tray-icon-template@2x.png b/src/views/mainview/assets/tray-icon-template@2x.png new file mode 100644 index 0000000..713b52a Binary files /dev/null and b/src/views/mainview/assets/tray-icon-template@2x.png differ