diff --git a/.mcp.json.example b/.mcp.json.example
index 416378b..0bafe08 100644
--- a/.mcp.json.example
+++ b/.mcp.json.example
@@ -1,11 +1,23 @@
{
"_comment": "Run: ~/Gits/orchestrator/scripts/mcp-setup.sh ~/Gits/brainlayer",
+ "_comment2": "brainlayer-socat: BrainBar daemon on Unix socket (replaces 9 Python processes)",
"mcpServers": {
"brainlayer": {
+ "command": "socat",
+ "args": ["STDIO", "UNIX-CONNECT:/tmp/brainbar.sock"]
+ },
+ "brainlayer-legacy": {
+ "_comment": "Fallback: original Python MCP server (remove after BrainBar is stable)",
"command": "brainlayer-mcp"
},
"voicelayer": {
"command": "voicelayer-mcp"
+ },
+ "cmux": {
+ "command": "node",
+ "args": [
+ "/Users/etanheyman/Gits/orchestrator/tools/cmux-mcp/dist/index.js"
+ ]
}
}
}
diff --git a/brain-bar/.gitignore b/brain-bar/.gitignore
new file mode 100644
index 0000000..d724af2
--- /dev/null
+++ b/brain-bar/.gitignore
@@ -0,0 +1,4 @@
+.build/
+.swiftpm/
+*.xcodeproj/
+DerivedData/
diff --git a/brain-bar/Package.swift b/brain-bar/Package.swift
new file mode 100644
index 0000000..0c14b74
--- /dev/null
+++ b/brain-bar/Package.swift
@@ -0,0 +1,23 @@
+// swift-tools-version: 6.0
+import PackageDescription
+
+let package = Package(
+    name: "BrainBar",
+    platforms: [
+        .macOS(.v14),
+    ],
+    targets: [
+        .executableTarget(
+            name: "BrainBar",
+            path: "Sources/BrainBar",
+            // Links the system SQLite; BrainDatabase calls the raw C API directly.
+            linkerSettings: [
+                .linkedLibrary("sqlite3"),
+            ]
+        ),
+        .testTarget(
+            name: "BrainBarTests",
+            dependencies: ["BrainBar"],
+            path: "Tests/BrainBarTests"
+        ),
+    ]
+)
diff --git a/brain-bar/Sources/BrainBar/BrainBarApp.swift b/brain-bar/Sources/BrainBar/BrainBarApp.swift
new file mode 100644
index 0000000..e243505
--- /dev/null
+++ b/brain-bar/Sources/BrainBar/BrainBarApp.swift
@@ -0,0 +1,58 @@
+// BrainBarApp.swift — Entry point for BrainBar menu bar daemon.
+//
+// Menu bar app (no Dock icon) that owns the BrainLayer SQLite database
+// and serves MCP tools over /tmp/brainbar.sock.
+
+import AppKit
+import SwiftUI
+
+// MARK: - App Delegate
+
+final class AppDelegate: NSObject, NSApplicationDelegate {
+    // Strong reference keeps the server alive for the app's lifetime.
+    private var server: BrainBarServer?
+
+    func applicationDidFinishLaunching(_ notification: Notification) {
+        // .accessory: menu-bar only, no Dock icon or app-switcher entry.
+        NSApp.setActivationPolicy(.accessory)
+
+        let srv = BrainBarServer()
+        server = srv
+        srv.start()
+    }
+
+    func applicationWillTerminate(_ notification: Notification) {
+        // Synchronously tears down socket, clients, and database (BrainBarServer.stop).
+        server?.stop()
+    }
+
+    func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool {
+        false
+    }
+}
+
+// MARK: - SwiftUI App entry point
+
+@main
+struct BrainBarApp: App {
+    @NSApplicationDelegateAdaptor(AppDelegate.self) var appDelegate
+
+    var body: some Scene {
+        MenuBarExtra("BrainBar", systemImage: "brain.head.profile") {
+            VStack(alignment: .leading, spacing: 6) {
+                Text("BrainBar")
+                    .font(.system(.caption, weight: .bold))
+                Text("Memory daemon active")
+                    .font(.system(.caption2))
+                    .foregroundStyle(.secondary)
+                Divider()
+                Button("Quit BrainBar") {
+                    NSApplication.shared.terminate(nil)
+                }
+                .keyboardShortcut("q")
+            }
+            .padding(8)
+        }
+
+        // Empty Settings scene: required so the MenuBarExtra scene is primary.
+        Settings {
+            EmptyView()
+        }
+    }
+}
diff --git a/brain-bar/Sources/BrainBar/BrainBarServer.swift b/brain-bar/Sources/BrainBar/BrainBarServer.swift
new file mode 100644
index 0000000..98aebbc
--- /dev/null
+++ b/brain-bar/Sources/BrainBar/BrainBarServer.swift
@@ -0,0 +1,199 @@
+// BrainBarServer.swift — Integrated socket server + MCP router + database.
+//
+// Owns:
+// - Unix domain socket on /tmp/brainbar.sock
+// - MCP Content-Length framing parser
+// - JSON-RPC router
+// - SQLite database (single-writer)
+
+import Foundation
+
+final class BrainBarServer: @unchecked Sendable {
+    private let socketPath: String
+    private let dbPath: String
+    // All socket and client state is confined to this serial queue.
+    private let queue = DispatchQueue(label: "com.brainlayer.brainbar.server", qos: .userInitiated)
+    private var listenFD: Int32 = -1
+    private var listenSource: DispatchSourceRead?
+    private var clients: [Int32: ClientState] = [:]
+    private var router: MCPRouter!
+    private var database: BrainDatabase!
+
+    /// Per-connection state; value type, so mutations must be written back to `clients`.
+    struct ClientState {
+        var source: DispatchSourceRead
+        var framing: MCPFraming
+    }
+
+    init(socketPath: String = "/tmp/brainbar.sock", dbPath: String? = nil) {
+        self.socketPath = socketPath
+        self.dbPath = dbPath ?? Self.defaultDBPath()
+    }
+
+    static func defaultDBPath() -> String {
+        let home = FileManager.default.homeDirectoryForCurrentUser.path
+        return "\(home)/.local/share/brainlayer/brainlayer.db"
+    }
+
+    /// Start listening asynchronously on the server queue.
+    func start() {
+        queue.async { [weak self] in
+            self?.startOnQueue()
+        }
+    }
+
+    /// Synchronously tear everything down (socket, clients, database).
+    func stop() {
+        queue.sync {
+            self.cleanup()
+        }
+    }
+
+    private func startOnQueue() {
+        // Initialize database and router
+        database = BrainDatabase(path: dbPath)
+        router = MCPRouter()
+        router.setDatabase(database)
+
+        // Clean up stale socket from a previous run.
+        unlink(socketPath)
+
+        let fd = socket(AF_UNIX, SOCK_STREAM, 0)
+        guard fd >= 0 else {
+            NSLog("[BrainBar] Failed to create socket: errno %d", errno)
+            return
+        }
+
+        var addr = sockaddr_un()
+        addr.sun_family = sa_family_t(AF_UNIX)
+        let pathBytes = socketPath.utf8CString
+        // sun_path is a fixed-size C array; refuse paths that don't fit (incl. NUL).
+        guard pathBytes.count <= MemoryLayout.size(ofValue: addr.sun_path) else {
+            NSLog("[BrainBar] Socket path too long (%d > %d): %@",
+                  pathBytes.count, MemoryLayout.size(ofValue: addr.sun_path), socketPath)
+            close(fd)
+            return
+        }
+        withUnsafeMutablePointer(to: &addr.sun_path) { ptr in
+            ptr.withMemoryRebound(to: CChar.self, capacity: pathBytes.count) { dest in
+                pathBytes.withUnsafeBufferPointer { src in
+                    _ = memcpy(dest, src.baseAddress!, src.count)
+                }
+            }
+        }
+
+        let bindResult = withUnsafePointer(to: &addr) { addrPtr in
+            addrPtr.withMemoryRebound(to: sockaddr.self, capacity: 1) { ptr in
+                bind(fd, ptr, socklen_t(MemoryLayout<sockaddr_un>.size))
+            }
+        }
+        guard bindResult == 0 else {
+            NSLog("[BrainBar] Failed to bind: errno %d", errno)
+            close(fd)
+            return
+        }
+
+        // Owner-only: the socket is a full read/write API to the memory DB.
+        chmod(socketPath, 0o600)
+
+        guard listen(fd, 16) == 0 else {
+            NSLog("[BrainBar] Failed to listen: errno %d", errno)
+            close(fd)
+            unlink(socketPath)
+            return
+        }
+
+        listenFD = fd
+
+        let source = DispatchSource.makeReadSource(fileDescriptor: fd, queue: queue)
+        source.setEventHandler { [weak self] in
+            self?.acceptClient()
+        }
+        source.setCancelHandler { [weak self] in
+            guard let self else { return }
+            close(fd)
+            listenFD = -1
+        }
+        source.resume()
+        listenSource = source
+
+        NSLog("[BrainBar] Server listening on %@", socketPath)
+    }
+
+    private func acceptClient() {
+        let clientFD = accept(listenFD, nil, nil)
+        guard clientFD >= 0 else { return }
+
+        // Non-blocking reads; the dispatch source tells us when data is ready.
+        let flags = fcntl(clientFD, F_GETFL)
+        _ = fcntl(clientFD, F_SETFL, flags | O_NONBLOCK)
+
+        // Writing to a disconnected peer should return EPIPE, not kill the process.
+        var nosigpipe: Int32 = 1
+        setsockopt(clientFD, SOL_SOCKET, SO_NOSIGPIPE, &nosigpipe, socklen_t(MemoryLayout<Int32>.size))
+
+        let readSource = DispatchSource.makeReadSource(fileDescriptor: clientFD, queue: queue)
+        readSource.setEventHandler { [weak self] in
+            self?.readFromClient(fd: clientFD)
+        }
+        readSource.setCancelHandler {
+            close(clientFD)
+        }
+        readSource.resume()
+
+        clients[clientFD] = ClientState(source: readSource, framing: MCPFraming())
+        NSLog("[BrainBar] Client connected (fd: %d)", clientFD)
+    }
+
+    private func readFromClient(fd: Int32) {
+        var buf = [UInt8](repeating: 0, count: 65536)
+        let n = read(fd, &buf, buf.count)
+
+        if n <= 0 {
+            if n == -1, errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR {
+                return
+            }
+            // EOF or hard error — drop the client; cancel handler closes the fd.
+            clients[fd]?.source.cancel()
+            clients.removeValue(forKey: fd)
+            return
+        }
+
+        guard var state = clients[fd] else { return }
+        state.framing.append(Data(buf[0..<n]))
+        // NOTE(review): the remainder of this method was reconstructed from a
+        // garbled patch — verify against the original file.
+        let messages = state.framing.extractMessages()
+        // ClientState is a value type: write the mutated framing buffer back,
+        // otherwise buffered partial frames are silently lost.
+        clients[fd] = state
+
+        for message in messages {
+            let response = router.handle(message)
+            guard !response.isEmpty else { continue }   // notification — no reply
+            if let frame = try? MCPFraming.encode(response) {
+                writeAll(frame, to: fd)
+            }
+        }
+    }
+
+    /// Write the full buffer, retrying on short writes / EAGAIN (socket is non-blocking).
+    private func writeAll(_ data: Data, to fd: Int32) {
+        data.withUnsafeBytes { (raw: UnsafeRawBufferPointer) in
+            var offset = 0
+            while offset < raw.count {
+                let n = write(fd, raw.baseAddress!.advanced(by: offset), raw.count - offset)
+                if n > 0 { offset += n; continue }
+                if n == -1 && (errno == EAGAIN || errno == EINTR) {
+                    usleep(1000)
+                    continue
+                }
+                break   // peer gone or unrecoverable error; reader will reap the client
+            }
+        }
+    }
+
+    private func cleanup() {
+        listenSource?.cancel()   // cancel handler closes listenFD
+        listenSource = nil
+        for (_, state) in clients { state.source.cancel() }
+        clients.removeAll()
+        if listenFD >= 0 { listenFD = -1 }
+        unlink(socketPath)
+        database?.close()
+        NSLog("[BrainBar] Server stopped")
+    }
+}
diff --git a/brain-bar/Sources/BrainBar/BrainDatabase.swift b/brain-bar/Sources/BrainBar/BrainDatabase.swift
new file mode 100644
index 0000000..e930e61
--- /dev/null
+++ b/brain-bar/Sources/BrainBar/BrainDatabase.swift
@@ -0,0 +1,317 @@
+// BrainDatabase.swift — SQLite database layer for
BrainBar.
+//
+// Wraps SQLite3 C API directly (no external dependencies).
+// Configures: WAL mode, FTS5, busy_timeout=5000, cache_size=-64000, synchronous=NORMAL.
+// Single-writer: only BrainBar writes. Concurrent reads are safe (WAL).
+//
+// IMPORTANT: Schema matches the production BrainLayer DB (312K chunks).
+// Column names: id (not chunk_id), conversation_id (not session_id),
+// source_file (NOT NULL), metadata (JSON), etc.
+
+import Foundation
+import SQLite3
+
+final class BrainDatabase: @unchecked Sendable {
+    private var db: OpaquePointer?
+    private let path: String
+    /// Whether the database opened successfully.
+    private(set) var isOpen: Bool = false
+
+    init(path: String) {
+        self.path = path
+        openAndConfigure()
+    }
+
+    private func openAndConfigure() {
+        // FULLMUTEX: SQLite serializes C-level access. Needed because WAL concurrent
+        // reads come from GCD threads, and close() could race with in-flight queries.
+        let flags = SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE | SQLITE_OPEN_FULLMUTEX
+        let rc = sqlite3_open_v2(path, &db, flags, nil)
+        guard rc == SQLITE_OK else {
+            NSLog("[BrainBar] Failed to open database at %@: %d", path, rc)
+            return
+        }
+        isOpen = true
+
+        // Configure PRAGMAs
+        exec("PRAGMA journal_mode = WAL")
+        exec("PRAGMA busy_timeout = 5000")
+        exec("PRAGMA cache_size = -64000")
+        exec("PRAGMA synchronous = NORMAL")
+
+        // Create schema only for NEW databases (test DBs).
+        // Production DB already has schema — don't interfere.
+        ensureSchema()
+    }
+
+    private func ensureSchema() {
+        // If chunks table already exists (production DB), don't touch schema.
+        if (try? tableExists("chunks")) == true {
+            return
+        }
+
+        // New/test DB — create production-compatible schema
+        exec("""
+            CREATE TABLE IF NOT EXISTS chunks (
+                id TEXT PRIMARY KEY,
+                content TEXT NOT NULL,
+                metadata TEXT NOT NULL DEFAULT '{}',
+                source_file TEXT NOT NULL DEFAULT 'brainbar',
+                project TEXT,
+                content_type TEXT DEFAULT 'assistant_text',
+                value_type TEXT,
+                char_count INTEGER DEFAULT 0,
+                source TEXT DEFAULT 'claude_code',
+                sender TEXT,
+                language TEXT,
+                conversation_id TEXT,
+                position INTEGER,
+                context_summary TEXT,
+                tags TEXT DEFAULT '[]',
+                tag_confidence REAL,
+                summary TEXT,
+                importance REAL DEFAULT 5,
+                intent TEXT,
+                enriched_at TEXT,
+                created_at TEXT DEFAULT (datetime('now'))
+            )
+            """)
+
+        // FTS5 — matches production: content, summary, tags, resolved_query, chunk_id UNINDEXED
+        exec("""
+            CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
+                content, summary, tags, resolved_query, chunk_id UNINDEXED
+            )
+            """)
+
+        // FTS sync triggers (match production trigger names)
+        // NOTE(review): the INSERT ... VALUES('delete', ...) form is only valid for
+        // external-content / contentless FTS5 tables; for this plain fts5 table a
+        // DELETE FROM chunks_fts WHERE rowid = old.rowid may be required — confirm
+        // against the production schema before relying on update/delete sync.
+        exec("""
+            CREATE TRIGGER IF NOT EXISTS chunks_fts_insert AFTER INSERT ON chunks BEGIN
+                INSERT INTO chunks_fts(rowid, content, summary, tags, resolved_query, chunk_id)
+                VALUES (new.rowid, new.content, new.summary, new.tags, NULL, new.id);
+            END
+            """)
+
+        exec("""
+            CREATE TRIGGER IF NOT EXISTS chunks_fts_delete AFTER DELETE ON chunks BEGIN
+                INSERT INTO chunks_fts(chunks_fts, rowid, content, summary, tags, resolved_query, chunk_id)
+                VALUES ('delete', old.rowid, old.content, old.summary, old.tags, NULL, old.id);
+            END
+            """)
+
+        exec("""
+            CREATE TRIGGER IF NOT EXISTS chunks_fts_update AFTER UPDATE ON chunks BEGIN
+                INSERT INTO chunks_fts(chunks_fts, rowid, content, summary, tags, resolved_query, chunk_id)
+                VALUES ('delete', old.rowid, old.content, old.summary, old.tags, NULL, old.id);
+                INSERT INTO chunks_fts(rowid, content, summary, tags, resolved_query, chunk_id)
+                VALUES (new.rowid, new.content, new.summary, new.tags, NULL, new.id);
+            END
+            """)
+    }
+
+    /// Close the connection. Safe to call more than once.
+    func close() {
+        if let db {
+            sqlite3_close(db)
+            self.db = nil
+        }
+    }
+
+    // MARK: - PRAGMA queries
+
+    // Allowlist prevents SQL injection through the interpolated PRAGMA name below.
+    private static let allowedPragmas: Set<String> = [
+        "journal_mode", "busy_timeout", "cache_size", "synchronous",
+        "wal_checkpoint", "page_count", "page_size", "freelist_count"
+    ]
+
+    /// Return the first column of `PRAGMA name` as text. Only allowlisted names run.
+    func pragma(_ name: String) throws -> String {
+        guard Self.allowedPragmas.contains(name) else {
+            throw DBError.invalidPragma(name)
+        }
+        guard let db else { throw DBError.notOpen }
+        var stmt: OpaquePointer?
+        let rc = sqlite3_prepare_v2(db, "PRAGMA \(name)", -1, &stmt, nil)
+        guard rc == SQLITE_OK else { throw DBError.prepare(rc) }
+        defer { sqlite3_finalize(stmt) }
+
+        guard sqlite3_step(stmt) == SQLITE_ROW else { throw DBError.noResult }
+        guard let cStr = sqlite3_column_text(stmt, 0) else { throw DBError.noResult }
+        return String(cString: cStr)
+    }
+
+    // MARK: - Table existence
+
+    /// Whether a table or view named `name` exists.
+    func tableExists(_ name: String) throws -> Bool {
+        guard let db else { throw DBError.notOpen }
+        var stmt: OpaquePointer?
+        let sql = "SELECT count(*) FROM sqlite_master WHERE type IN ('table','view') AND name = ?"
+        let rc = sqlite3_prepare_v2(db, sql, -1, &stmt, nil)
+        guard rc == SQLITE_OK else { throw DBError.prepare(rc) }
+        defer { sqlite3_finalize(stmt) }
+
+        // -1 == SQLITE_TRANSIENT: SQLite copies the string before we return.
+        sqlite3_bind_text(stmt, 1, name, -1, unsafeBitCast(-1, to: sqlite3_destructor_type.self))
+        guard sqlite3_step(stmt) == SQLITE_ROW else { return false }
+        return sqlite3_column_int(stmt, 0) > 0
+    }
+
+    // MARK: - Insert chunk (production schema)
+
+    /// Upsert a chunk row using production column names.
+    func insertChunk(id: String, content: String, sessionId: String, project: String, contentType: String, importance: Int) throws {
+        guard let db else { throw DBError.notOpen }
+        var stmt: OpaquePointer?
+        let sql = """
+            INSERT OR REPLACE INTO chunks (id, content, metadata, source_file, project, content_type, importance, conversation_id, char_count)
+            VALUES (?, ?, '{}', 'brainbar', ?, ?, ?, ?, ?)
+            """
+        let rc = sqlite3_prepare_v2(db, sql, -1, &stmt, nil)
+        guard rc == SQLITE_OK else { throw DBError.prepare(rc) }
+        defer { sqlite3_finalize(stmt) }
+
+        let TRANSIENT = unsafeBitCast(-1, to: sqlite3_destructor_type.self)
+        sqlite3_bind_text(stmt, 1, id, -1, TRANSIENT)
+        sqlite3_bind_text(stmt, 2, content, -1, TRANSIENT)
+        sqlite3_bind_text(stmt, 3, project, -1, TRANSIENT)
+        sqlite3_bind_text(stmt, 4, contentType, -1, TRANSIENT)
+        sqlite3_bind_int(stmt, 5, Int32(importance))
+        sqlite3_bind_text(stmt, 6, sessionId, -1, TRANSIENT)
+        // NOTE: Swift Character count, not UTF-8 byte count.
+        sqlite3_bind_int(stmt, 7, Int32(content.count))
+
+        let stepRC = sqlite3_step(stmt)
+        guard stepRC == SQLITE_DONE else { throw DBError.step(stepRC) }
+    }
+
+    // MARK: - FTS5 Search (production schema)
+
+    /// FTS5 search over chunks; returns rows as dictionaries keyed for the MCP layer.
+    func search(query: String, limit: Int) throws -> [[String: Any]] {
+        guard let db else { throw DBError.notOpen }
+
+        let sanitized = sanitizeFTS5Query(query)
+
+        var stmt: OpaquePointer?
+        // Production FTS5: content, summary, tags, resolved_query, chunk_id UNINDEXED
+        // Join on rowid. Use production column names (id, conversation_id, etc.)
+        let sql = """
+            SELECT c.id, c.content, c.project, c.content_type, c.importance,
+                   c.created_at, c.summary, c.tags, c.conversation_id
+            FROM chunks_fts f
+            JOIN chunks c ON f.rowid = c.rowid
+            WHERE chunks_fts MATCH ?
+            ORDER BY rank
+            LIMIT ?
+            """
+        let rc = sqlite3_prepare_v2(db, sql, -1, &stmt, nil)
+        guard rc == SQLITE_OK else { throw DBError.prepare(rc) }
+        defer { sqlite3_finalize(stmt) }
+
+        let TRANSIENT = unsafeBitCast(-1, to: sqlite3_destructor_type.self)
+        sqlite3_bind_text(stmt, 1, sanitized, -1, TRANSIENT)
+        sqlite3_bind_int(stmt, 2, Int32(limit))
+
+        var results: [[String: Any]] = []
+        while sqlite3_step(stmt) == SQLITE_ROW {
+            var row: [String: Any] = [:]
+            row["chunk_id"] = columnText(stmt, 0)
+            row["content"] = columnText(stmt, 1)
+            row["project"] = columnText(stmt, 2)
+            row["content_type"] = columnText(stmt, 3)
+            row["importance"] = Int(sqlite3_column_int(stmt, 4))
+            row["created_at"] = columnText(stmt, 5)
+            row["summary"] = columnText(stmt, 6)
+            row["tags"] = columnText(stmt, 7)
+            row["session_id"] = columnText(stmt, 8)
+            results.append(row)
+        }
+
+        return results
+    }
+
+    // MARK: - Store (brain_store, production schema)
+
+    /// Insert a user-supplied memory; returns the generated chunk id.
+    func store(content: String, tags: [String], importance: Int, source: String) throws -> String {
+        let id = "brainbar-\(UUID().uuidString.lowercased().prefix(12))"
+        let tagsJSON: String
+        if let data = try? JSONSerialization.data(withJSONObject: tags),
+           let str = String(data: data, encoding: .utf8) {
+            tagsJSON = str
+        } else {
+            tagsJSON = "[]"
+        }
+
+        guard let db else { throw DBError.notOpen }
+        var stmt: OpaquePointer?
+        let sql = """
+            INSERT INTO chunks (id, content, metadata, source_file, tags, importance, source, content_type, char_count)
+            VALUES (?, ?, '{}', 'brainbar-store', ?, ?, ?, 'user_message', ?)
+            """
+        let rc = sqlite3_prepare_v2(db, sql, -1, &stmt, nil)
+        guard rc == SQLITE_OK else { throw DBError.prepare(rc) }
+        defer { sqlite3_finalize(stmt) }
+
+        let TRANSIENT = unsafeBitCast(-1, to: sqlite3_destructor_type.self)
+        sqlite3_bind_text(stmt, 1, id, -1, TRANSIENT)
+        sqlite3_bind_text(stmt, 2, content, -1, TRANSIENT)
+        sqlite3_bind_text(stmt, 3, tagsJSON, -1, TRANSIENT)
+        sqlite3_bind_int(stmt, 4, Int32(importance))
+        sqlite3_bind_text(stmt, 5, source, -1, TRANSIENT)
+        sqlite3_bind_int(stmt, 6, Int32(content.count))
+
+        let stepRC = sqlite3_step(stmt)
+        guard stepRC == SQLITE_DONE else { throw DBError.step(stepRC) }
+
+        return id
+    }
+
+    // MARK: - Helpers
+
+    /// Run a statement, logging (not throwing) on error — used for PRAGMAs/DDL.
+    private func exec(_ sql: String) {
+        guard let db else { return }
+        var errMsg: UnsafeMutablePointer<CChar>?
+        let rc = sqlite3_exec(db, sql, nil, nil, &errMsg)
+        if rc != SQLITE_OK {
+            let msg = errMsg.map { String(cString: $0) } ?? "unknown error"
+            NSLog("[BrainBar] SQL error: %@ (code: %d)", msg, rc)
+            sqlite3_free(errMsg)
+        }
+    }
+
+    private func columnText(_ stmt: OpaquePointer?, _ col: Int32) -> String? {
+        guard let cStr = sqlite3_column_text(stmt, col) else { return nil }
+        return String(cString: cStr)
+    }
+
+    /// Quote each token as an FTS5 phrase and OR them — neutralizes FTS5 operators.
+    private func sanitizeFTS5Query(_ query: String) -> String {
+        let tokens = query.split(separator: " ").compactMap { token -> String? in
+            let cleaned = token
+                .replacingOccurrences(of: "\"", with: "")
+                .replacingOccurrences(of: "*", with: "")
+                .trimmingCharacters(in: .whitespaces)
+            guard !cleaned.isEmpty else { return nil }
+            return "\"\(cleaned)\""
+        }
+        guard !tokens.isEmpty else { return "\"\"" }
+        return tokens.joined(separator: " OR ")
+    }
+
+    // MARK: - Errors
+
+    enum DBError: LocalizedError {
+        case notOpen
+        case prepare(Int32)
+        case step(Int32)
+        case noResult
+        case invalidPragma(String)
+
+        var errorDescription: String? {
+            switch self {
+            case .notOpen: return "Database not open"
+            case .prepare(let rc): return "SQLite prepare failed: \(rc)"
+            case .step(let rc): return "SQLite step failed: \(rc)"
+            case .noResult: return "No result"
+            case .invalidPragma(let name): return "PRAGMA '\(name)' not in allowlist"
+            }
+        }
+    }
+
+    deinit {
+        close()
+    }
+}
diff --git a/brain-bar/Sources/BrainBar/MCPFraming.swift b/brain-bar/Sources/BrainBar/MCPFraming.swift
new file mode 100644
index 0000000..08a420d
--- /dev/null
+++ b/brain-bar/Sources/BrainBar/MCPFraming.swift
@@ -0,0 +1,91 @@
+// MCPFraming.swift — MCP Content-Length framing parser.
+//
+// MCP uses Content-Length headers (like LSP):
+//   Content-Length: N\r\n\r\n{json payload of exactly N bytes}
+//
+// This struct accumulates bytes and extracts complete JSON-RPC messages.
+
+import Foundation
+
+struct MCPFraming: Sendable {
+    private var buffer = Data()
+    private static let separator = Data("\r\n\r\n".utf8)
+    /// Max payload size (10 MB) — prevents DoS via absurd Content-Length values.
+    private static let maxContentLength = 10_000_000
+
+    /// Max total buffer size (16 MB) — prevents memory exhaustion from slow/malicious clients.
+    private static let maxBufferSize = 16_000_000
+
+    /// Append raw bytes from a socket read. Drops data if buffer would exceed limit.
+    mutating func append(_ data: Data) {
+        guard buffer.count + data.count <= Self.maxBufferSize else {
+            buffer.removeAll()
+            return
+        }
+        buffer.append(data)
+    }
+
+    /// Extract all complete messages from the buffer.
+    /// Incomplete messages remain in the buffer for the next append.
+    mutating func extractMessages() -> [[String: Any]] {
+        var messages: [[String: Any]] = []
+
+        while true {
+            // Find the header/body separator
+            guard let separatorRange = buffer.range(of: Self.separator) else {
+                break
+            }
+
+            // Parse Content-Length from headers.
+            // NOTE(review): this guard was reconstructed from a garbled patch —
+            // verify against the original file.
+            let headerData = buffer[buffer.startIndex..<separatorRange.lowerBound]
+            guard let header = String(data: headerData, encoding: .utf8),
+                  let contentLength = parseContentLength(header),
+                  contentLength > 0, contentLength <= Self.maxContentLength else {
+                // Invalid or zero-length — skip past this separator
+                buffer = Data(buffer[separatorRange.upperBound...])
+                continue
+            }
+
+            // Buffer is re-based after every slice, so indices below start at 0.
+            let bodyStart = separatorRange.upperBound
+
+            // Check if we have enough body bytes
+            guard buffer.count >= bodyStart + contentLength else {
+                // Incomplete body — wait for more data
+                break
+            }
+
+            // Extract body
+            let bodyData = buffer[bodyStart..<(bodyStart + contentLength)]
+            buffer = Data(buffer[(bodyStart + contentLength)...])
+
+            // Parse JSON; non-object or malformed payloads are dropped.
+            if let json = try? JSONSerialization.jsonObject(with: bodyData) as? [String: Any] {
+                messages.append(json)
+            }
+        }
+
+        return messages
+    }
+
+    /// Encode a JSON-RPC response with Content-Length framing.
+    static func encode(_ response: [String: Any]) throws -> Data {
+        let jsonData = try JSONSerialization.data(withJSONObject: response)
+        let header = "Content-Length: \(jsonData.count)\r\n\r\n"
+        var frame = Data(header.utf8)
+        frame.append(jsonData)
+        return frame
+    }
+
+    // MARK: - Private
+
+    /// Case-insensitive scan of header lines for "Content-Length: N".
+    private func parseContentLength(_ header: String) -> Int? {
+        for line in header.split(separator: "\r\n") {
+            let trimmed = line.trimmingCharacters(in: .whitespaces)
+            if trimmed.lowercased().hasPrefix("content-length:") {
+                let value = trimmed.dropFirst("content-length:".count).trimmingCharacters(in: .whitespaces)
+                return Int(value)
+            }
+        }
+        return nil
+    }
+}
diff --git a/brain-bar/Sources/BrainBar/MCPRouter.swift b/brain-bar/Sources/BrainBar/MCPRouter.swift
new file mode 100644
index 0000000..d76ba55
--- /dev/null
+++ b/brain-bar/Sources/BrainBar/MCPRouter.swift
@@ -0,0 +1,344 @@
+// MCPRouter.swift — MCP JSON-RPC method router.
+//
+// Routes the 3 core MCP methods:
+// - initialize: handshake, return capabilities
+// - tools/list: enumerate all 8 BrainLayer tools with schemas
+// - tools/call: dispatch to tool handler by name
+//
+// Also handles notifications (no response) and unknown methods (error).
+
+import Foundation
+
+final class MCPRouter: @unchecked Sendable {
+    private var database: BrainDatabase?
+
+    /// Inject database for tool handlers.
+    func setDatabase(_ db: BrainDatabase) {
+        self.database = db
+    }
+
+    /// Handle a parsed JSON-RPC request and return a response.
+    /// Returns empty dict for notifications (no id) — callers must not send it.
+    func handle(_ request: [String: Any]) -> [String: Any] {
+        guard let method = request["method"] as? String else {
+            return jsonRPCError(id: request["id"], code: -32600, message: "Invalid request: missing method")
+        }
+
+        // Notifications have no id — handle silently, no response.
+        guard let id = request["id"] else {
+            return [:]
+        }
+
+        switch method {
+        case "initialize":
+            return handleInitialize(id: id, params: request["params"] as? [String: Any] ?? [:])
+        case "tools/list":
+            return handleToolsList(id: id)
+        case "tools/call":
+            return handleToolsCall(id: id, params: request["params"] as? [String: Any] ?? [:])
+        default:
+            return jsonRPCError(id: id, code: -32601, message: "Method not found: \(method)")
+        }
+    }
+
+    // MARK: - initialize
+
+    private func handleInitialize(id: Any, params: [String: Any]) -> [String: Any] {
+        return [
+            "jsonrpc": "2.0",
+            "id": id,
+            "result": [
+                "protocolVersion": "2024-11-05",
+                "capabilities": [
+                    "tools": ["listChanged": false]
+                ],
+                "serverInfo": [
+                    "name": "brainbar",
+                    "version": "1.0.0"
+                ]
+            ] as [String: Any]
+        ]
+    }
+
+    // MARK: - tools/list
+
+    private func handleToolsList(id: Any) -> [String: Any] {
+        return [
+            "jsonrpc": "2.0",
+            "id": id,
+            "result": [
+                "tools": Self.toolDefinitions
+            ]
+        ]
+    }
+
+    // MARK: - tools/call
+
+    private func handleToolsCall(id: Any, params: [String: Any]) -> [String: Any] {
+        guard let toolName = params["name"] as? String else {
+            return jsonRPCError(id: id, code: -32602, message: "Missing tool name")
+        }
+
+        let arguments = params["arguments"] as? [String: Any] ?? [:]
+
+        // Check tool exists
+        guard Self.toolDefinitions.contains(where: { ($0["name"] as? String) == toolName }) else {
+            return jsonRPCError(id: id, code: -32601, message: "Unknown tool: \(toolName)")
+        }
+
+        // Dispatch to handler. Per MCP convention, tool failures are reported as
+        // a successful JSON-RPC response with isError: true, not a protocol error.
+        do {
+            let result = try dispatchTool(name: toolName, arguments: arguments)
+            return [
+                "jsonrpc": "2.0",
+                "id": id,
+                "result": [
+                    "content": [
+                        ["type": "text", "text": result]
+                    ]
+                ] as [String: Any]
+            ]
+        } catch {
+            return [
+                "jsonrpc": "2.0",
+                "id": id,
+                "result": [
+                    "content": [
+                        ["type": "text", "text": "Error: \(error.localizedDescription)"]
+                    ],
+                    "isError": true
+                ] as [String: Any]
+            ]
+        }
+    }
+
+    private func dispatchTool(name: String, arguments: [String: Any]) throws -> String {
+        switch name {
+        case "brain_search":
+            return try handleBrainSearch(arguments)
+        case "brain_store":
+            return try handleBrainStore(arguments)
+        case "brain_recall":
+            return try handleBrainRecall(arguments)
+        case "brain_entity":
+            return try handleBrainEntity(arguments)
+        case "brain_digest":
+            return try handleBrainDigest(arguments)
+        case "brain_update":
+            return try handleBrainUpdate(arguments)
+        case "brain_expand":
+            return try handleBrainExpand(arguments)
+        case "brain_tags":
+            return try handleBrainTags(arguments)
+        default:
+            throw ToolError.unknownTool(name)
+        }
+    }
+
+    // MARK: - Tool Handlers
+
+    private func handleBrainSearch(_ args: [String: Any]) throws -> String {
+        guard let query = args["query"] as? String else {
+            throw ToolError.missingParameter("query")
+        }
+        // Clamp to 1...100: a zero/negative value would reach SQL LIMIT, where
+        // SQLite treats negative limits as "no limit" and could dump every row.
+        let limit = max(1, min(args["num_results"] as? Int ?? 5, 100))
+        guard let db = database else {
+            return "[]"
+        }
+        let results = try db.search(query: query, limit: limit)
+        let data = try JSONSerialization.data(withJSONObject: results)
+        return String(data: data, encoding: .utf8) ?? "[]"
+    }
+
+    private func handleBrainStore(_ args: [String: Any]) throws -> String {
+        guard let content = args["content"] as? String else {
+            throw ToolError.missingParameter("content")
+        }
+        let tags = args["tags"] as? [String] ?? []
+        let importance = args["importance"] as? Int ?? 5
+        guard let db = database else {
+            throw ToolError.noDatabase
+        }
+        let id = try db.store(content: content, tags: tags, importance: importance, source: "mcp")
+        return jsonEncode(["chunk_id": id, "status": "stored"])
+    }
+
+    private func handleBrainRecall(_ args: [String: Any]) throws -> String {
+        throw ToolError.notImplemented("brain_recall")
+    }
+
+    private func handleBrainEntity(_ args: [String: Any]) throws -> String {
+        guard args["query"] is String else {
+            throw ToolError.missingParameter("query")
+        }
+        throw ToolError.notImplemented("brain_entity")
+    }
+
+    private func handleBrainDigest(_ args: [String: Any]) throws -> String {
+        guard args["content"] is String else {
+            throw ToolError.missingParameter("content")
+        }
+        throw ToolError.notImplemented("brain_digest")
+    }
+
+    private func handleBrainUpdate(_ args: [String: Any]) throws -> String {
+        guard args["action"] is String else {
+            throw ToolError.missingParameter("action")
+        }
+        throw ToolError.notImplemented("brain_update")
+    }
+
+    private func handleBrainExpand(_ args: [String: Any]) throws -> String {
+        guard args["chunk_id"] is String else {
+            throw ToolError.missingParameter("chunk_id")
+        }
+        throw ToolError.notImplemented("brain_expand")
+    }
+
+    private func handleBrainTags(_ args: [String: Any]) throws -> String {
+        throw ToolError.notImplemented("brain_tags")
+    }
+
+    /// Safe JSON encoding — never use string interpolation with user data.
+    private func jsonEncode(_ dict: [String: Any]) -> String {
+        guard let data = try? JSONSerialization.data(withJSONObject: dict),
+              let str = String(data: data, encoding: .utf8) else {
+            return "{}"
+        }
+        return str
+    }
+
+    // MARK: - Error helpers
+
+    private func jsonRPCError(id: Any?, code: Int, message: String) -> [String: Any] {
+        var response: [String: Any] = [
+            "jsonrpc": "2.0",
+            "error": [
+                "code": code,
+                "message": message
+            ]
+        ]
+        if let id { response["id"] = id }
+        return response
+    }
+
+    enum ToolError: LocalizedError {
+        case unknownTool(String)
+        case missingParameter(String)
+        case noDatabase
+        case notImplemented(String)
+
+        var errorDescription: String? {
+            switch self {
+            case .unknownTool(let name): return "Unknown tool: \(name)"
+            case .missingParameter(let param): return "Missing required parameter: \(param)"
+            case .noDatabase: return "Database not available"
+            case .notImplemented(let tool): return "\(tool) not yet implemented in BrainBar (use Python MCP server)"
+            }
+        }
+    }
+
+    // MARK: - Tool Definitions
+
+    // Immutable after initialization; nonisolated(unsafe) silences strict-concurrency
+    // checking for the heterogeneous [String: Any] payload.
+    nonisolated(unsafe) static let toolDefinitions: [[String: Any]] = [
+        [
+            "name": "brain_search",
+            "description": "Search through past conversations and learnings. Hybrid semantic + keyword search.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "query": ["type": "string", "description": "Natural language search query"],
+                    "num_results": ["type": "integer", "description": "Number of results (default: 5, max: 100)"],
+                    "project": ["type": "string", "description": "Filter by project name"],
+                    "tag": ["type": "string", "description": "Filter by tag"],
+                    "importance_min": ["type": "number", "description": "Minimum importance score (1-10)"],
+                    "detail": ["type": "string", "enum": ["compact", "full"], "description": "Result detail level"],
+                ] as [String: Any],
+                "required": ["query"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_store",
+            "description": "Save decisions, learnings, mistakes, ideas, todos to memory.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "content": ["type": "string", "description": "Content to store"],
+                    "tags": ["type": "array", "items": ["type": "string"], "description": "Tags for categorization"],
+                    "importance": ["type": "integer", "description": "Importance score (1-10)"],
+                ] as [String: Any],
+                "required": ["content"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_recall",
+            "description": "Get current working context, browse sessions, or inspect session details.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "mode": ["type": "string", "enum": ["context", "sessions", "operations", "plan", "summary", "stats"], "description": "Recall mode"],
+                    "session_id": ["type": "string", "description": "Session ID for operations/summary mode"],
+                ] as [String: Any],
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_entity",
+            "description": "Look up a known entity in the knowledge graph.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "query": ["type": "string", "description": "Entity name to look up"],
+                ] as [String: Any],
+                "required": ["query"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_digest",
+            "description": "Ingest raw content (transcripts, docs, articles). Extracts entities, relations, action items.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "content": ["type": "string", "description": "Raw content to digest"],
+                ] as [String: Any],
+                "required": ["content"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_update",
+            "description": "Update, archive, or merge existing memories.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "action": ["type": "string", "enum": ["update", "archive", "merge"], "description": "Action to perform"],
+                    "chunk_id": ["type": "string", "description": "Chunk ID to update"],
+                ] as [String: Any],
+                "required": ["action", "chunk_id"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_expand",
+            "description": "Drill into a specific search result. Returns full content + surrounding chunks.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "chunk_id": ["type": "string", "description": "Chunk ID to expand"],
+                    "before": ["type": "integer", "description": "Context chunks before (default: 3)"],
+                    "after": ["type": "integer", "description": "Context chunks after (default: 3)"],
+                ] as [String: Any],
+                "required": ["chunk_id"]
+            ] as [String: Any]
+        ],
+        [
+            "name": "brain_tags",
+            "description": "List, search, or suggest tags across the knowledge base.",
+            "inputSchema": [
+                "type": "object",
+                "properties": [
+                    "query": ["type": "string", "description": "Optional search query to filter tags"],
+                ] as [String: Any],
+            ] as [String: Any]
+        ],
+    ]
+}
diff --git a/brain-bar/Tests/BrainBarTests/DatabaseTests.swift b/brain-bar/Tests/BrainBarTests/DatabaseTests.swift
new file mode 100644
index 0000000..8b9b21e
--- /dev/null
+++ b/brain-bar/Tests/BrainBarTests/DatabaseTests.swift
@@ -0,0 +1,137 @@
+// DatabaseTests.swift — RED tests for BrainBar SQLite database layer.
+// +// BrainBar embeds SQLite with: +// - WAL mode +// - FTS5 for full-text search +// - busy_timeout=5000 +// - cache_size=-64000 (64MB) +// - synchronous=NORMAL +// - Single-writer architecture (no concurrent writes) + +import XCTest +@testable import BrainBar + +final class DatabaseTests: XCTestCase { + var db: BrainDatabase! + var tempDBPath: String! + + override func setUp() { + super.setUp() + tempDBPath = NSTemporaryDirectory() + "brainbar-test-\(UUID().uuidString).db" + db = BrainDatabase(path: tempDBPath) + } + + override func tearDown() { + db.close() + try? FileManager.default.removeItem(atPath: tempDBPath) + try? FileManager.default.removeItem(atPath: tempDBPath + "-wal") + try? FileManager.default.removeItem(atPath: tempDBPath + "-shm") + super.tearDown() + } + + // MARK: - PRAGMAs + + func testWALModeEnabled() throws { + let mode = try db.pragma("journal_mode") + XCTAssertEqual(mode, "wal") + } + + func testBusyTimeoutSet() throws { + let timeout = try db.pragma("busy_timeout") + XCTAssertEqual(timeout, "5000") + } + + func testCacheSizeSet() throws { + let cacheSize = try db.pragma("cache_size") + XCTAssertEqual(cacheSize, "-64000") + } + + func testSynchronousNormal() throws { + let sync = try db.pragma("synchronous") + // NORMAL = 1 + XCTAssertEqual(sync, "1") + } + + // MARK: - Schema + + func testChunksTableExists() throws { + let exists = try db.tableExists("chunks") + XCTAssertTrue(exists, "chunks table must exist") + } + + func testFTSTableExists() throws { + let exists = try db.tableExists("chunks_fts") + XCTAssertTrue(exists, "chunks_fts FTS5 table must exist") + } + + // MARK: - Search (FTS5) + + func testFTSSearchReturnsResults() throws { + // Insert a test chunk + try db.insertChunk( + id: "test-chunk-1", + content: "Authentication was implemented using JWT tokens with refresh rotation", + sessionId: "session-1", + project: "brainlayer", + contentType: "assistant_text", + importance: 7 + ) + + let results = try db.search(query: 
"authentication JWT", limit: 10) + XCTAssertFalse(results.isEmpty, "FTS search should find the inserted chunk") + XCTAssertEqual(results.first?["chunk_id"] as? String, "test-chunk-1") + } + + func testSearchReturnsEmptyForNoMatch() throws { + let results = try db.search(query: "xyznonexistent123", limit: 10) + XCTAssertTrue(results.isEmpty) + } + + // MARK: - Store + + func testStoreCreatesChunk() throws { + let id = try db.store( + content: "Decision: Use GRDB for SQLite access", + tags: ["decision", "architecture"], + importance: 8, + source: "mcp" + ) + + XCTAssertFalse(id.isEmpty, "store should return a chunk ID") + + // Verify it's searchable + let results = try db.search(query: "GRDB SQLite", limit: 10) + XCTAssertFalse(results.isEmpty) + } + + // MARK: - Concurrent reads + + func testConcurrentReadsDoNotBlock() throws { + // Insert test data + try db.insertChunk( + id: "concurrent-1", + content: "Test chunk for concurrent reads", + sessionId: "session-1", + project: "brainlayer", + contentType: "assistant_text", + importance: 5 + ) + + let expectation = XCTestExpectation(description: "concurrent reads") + expectation.expectedFulfillmentCount = 10 + + for _ in 0..<10 { + DispatchQueue.global().async { + do { + let results = try self.db.search(query: "concurrent", limit: 5) + XCTAssertFalse(results.isEmpty) + } catch { + XCTFail("Concurrent read failed: \(error)") + } + expectation.fulfill() + } + } + + wait(for: [expectation], timeout: 5.0) + } +} diff --git a/brain-bar/Tests/BrainBarTests/MCPFramingTests.swift b/brain-bar/Tests/BrainBarTests/MCPFramingTests.swift new file mode 100644 index 0000000..ff6ed6a --- /dev/null +++ b/brain-bar/Tests/BrainBarTests/MCPFramingTests.swift @@ -0,0 +1,124 @@ +// MCPFramingTests.swift — RED tests for MCP Content-Length framing parser. +// +// MCP uses Content-Length headers (like LSP): +// Content-Length: N\r\n\r\n{json payload of exactly N bytes} +// +// The framing parser must: +// 1. 
Parse complete messages from a byte buffer +// 2. Handle partial messages (return nil, keep buffer) +// 3. Handle multiple messages in a single buffer +// 4. Handle split headers (Content-Length arrives in one read, body in next) + +import XCTest +@testable import BrainBar + +final class MCPFramingTests: XCTestCase { + + // MARK: - Single complete message + + func testParsesSingleCompleteMessage() throws { + var framing = MCPFraming() + let json = #"{"jsonrpc":"2.0","id":1,"method":"initialize"}"# + let frame = "Content-Length: \(json.utf8.count)\r\n\r\n\(json)" + + framing.append(Data(frame.utf8)) + let messages = framing.extractMessages() + + XCTAssertEqual(messages.count, 1) + let msg = try XCTUnwrap(messages.first) + XCTAssertEqual(msg["method"] as? String, "initialize") + XCTAssertEqual(msg["id"] as? Int, 1) + } + + // MARK: - Partial message (header only) + + func testBuffersPartialMessage() { + var framing = MCPFraming() + let json = #"{"jsonrpc":"2.0","id":1,"method":"initialize"}"# + let header = "Content-Length: \(json.utf8.count)\r\n\r\n" + + // Only send header, no body yet + framing.append(Data(header.utf8)) + let messages = framing.extractMessages() + XCTAssertTrue(messages.isEmpty, "Should not yield message when body is incomplete") + } + + // MARK: - Partial message completed in second append + + func testCompletesPartialMessageOnSecondAppend() throws { + var framing = MCPFraming() + let json = #"{"jsonrpc":"2.0","id":2,"method":"tools/list"}"# + let header = "Content-Length: \(json.utf8.count)\r\n\r\n" + + framing.append(Data(header.utf8)) + XCTAssertTrue(framing.extractMessages().isEmpty) + + framing.append(Data(json.utf8)) + let messages = framing.extractMessages() + XCTAssertEqual(messages.count, 1) + XCTAssertEqual(messages.first?["method"] as? 
String, "tools/list") + } + + // MARK: - Multiple messages in one buffer + + func testParsesMultipleMessagesInOneBuffer() { + var framing = MCPFraming() + let json1 = #"{"jsonrpc":"2.0","id":1,"method":"initialize"}"# + let json2 = #"{"jsonrpc":"2.0","id":2,"method":"tools/list"}"# + let frame1 = "Content-Length: \(json1.utf8.count)\r\n\r\n\(json1)" + let frame2 = "Content-Length: \(json2.utf8.count)\r\n\r\n\(json2)" + + framing.append(Data((frame1 + frame2).utf8)) + let messages = framing.extractMessages() + + XCTAssertEqual(messages.count, 2) + XCTAssertEqual(messages[0]["method"] as? String, "initialize") + XCTAssertEqual(messages[1]["method"] as? String, "tools/list") + } + + // MARK: - Split header across reads + + func testHandlesSplitHeader() throws { + var framing = MCPFraming() + let json = #"{"jsonrpc":"2.0","id":3,"method":"tools/call"}"# + let fullFrame = "Content-Length: \(json.utf8.count)\r\n\r\n\(json)" + + // Split in the middle of "Content-Length" + let splitPoint = 8 + let part1 = Data(Array(fullFrame.utf8)[0.. = [ + "brain_search", "brain_store", "brain_recall", "brain_entity", + "brain_digest", "brain_update", "brain_expand", "brain_tags" + ] + XCTAssertEqual(toolNames, expected) + } + + func testEachToolHasInputSchema() throws { + let router = MCPRouter() + let request: [String: Any] = [ + "jsonrpc": "2.0", + "id": 3, + "method": "tools/list", + ] + + let response = router.handle(request) + let tools = (response["result"] as? [String: Any])?["tools"] as? [[String: Any]] ?? [] + + for tool in tools { + let name = tool["name"] as? String ?? "unknown" + XCTAssertNotNil(tool["description"] as? String, "\(name) must have description") + XCTAssertNotNil(tool["inputSchema"] as? 
[String: Any], "\(name) must have inputSchema") + } + } + + // MARK: - Tools call + + func testToolsCallDispatchesToHandler() throws { + let router = MCPRouter() + let request: [String: Any] = [ + "jsonrpc": "2.0", + "id": 4, + "method": "tools/call", + "params": [ + "name": "brain_search", + "arguments": ["query": "test query"] + ] + ] + + let response = router.handle(request) + + // Should not be an error + XCTAssertNil(response["error"], "brain_search should not return error") + XCTAssertNotNil(response["result"], "brain_search should return result") + XCTAssertEqual(response["id"] as? Int, 4) + } + + func testToolsCallUnknownToolReturnsError() throws { + let router = MCPRouter() + let request: [String: Any] = [ + "jsonrpc": "2.0", + "id": 5, + "method": "tools/call", + "params": [ + "name": "nonexistent_tool", + "arguments": [:] as [String: Any] + ] + ] + + let response = router.handle(request) + let error = response["error"] as? [String: Any] + + XCTAssertNotNil(error, "Unknown tool should return JSON-RPC error") + XCTAssertEqual(error?["code"] as? Int, -32601, "Should be method-not-found error") + } + + // MARK: - Unknown method + + func testUnknownMethodReturnsError() throws { + let router = MCPRouter() + let request: [String: Any] = [ + "jsonrpc": "2.0", + "id": 6, + "method": "unknown/method", + ] + + let response = router.handle(request) + let error = response["error"] as? [String: Any] + + XCTAssertNotNil(error) + XCTAssertEqual(error?["code"] as? 
Int, -32601) + } + + // MARK: - Notifications (no id) + + func testNotificationDoesNotRequireResponse() { + let router = MCPRouter() + let request: [String: Any] = [ + "jsonrpc": "2.0", + "method": "notifications/initialized", + ] + + let response = router.handle(request) + // Notifications should return empty/nil response (no id to respond to) + XCTAssertTrue(response.isEmpty || response["id"] == nil) + } +} diff --git a/brain-bar/Tests/BrainBarTests/SocketIntegrationTests.swift b/brain-bar/Tests/BrainBarTests/SocketIntegrationTests.swift new file mode 100644 index 0000000..79e077d --- /dev/null +++ b/brain-bar/Tests/BrainBarTests/SocketIntegrationTests.swift @@ -0,0 +1,191 @@ +// SocketIntegrationTests.swift — RED tests for end-to-end socket + MCP flow. +// +// Tests the full pipeline: connect to Unix socket → send Content-Length framed +// MCP request → receive Content-Length framed response. + +import XCTest +@testable import BrainBar + +final class SocketIntegrationTests: XCTestCase { + let testSocketPath = "/tmp/brainbar-test-\(ProcessInfo.processInfo.processIdentifier).sock" + var server: BrainBarServer! 
+ + override func setUp() { + super.setUp() + let tempDB = NSTemporaryDirectory() + "brainbar-integration-\(UUID().uuidString).db" + server = BrainBarServer(socketPath: testSocketPath, dbPath: tempDB) + server.start() + // Give server time to bind + Thread.sleep(forTimeInterval: 0.2) + } + + override func tearDown() { + server.stop() + super.tearDown() + } + + // MARK: - Connection + + func testConnectsToSocket() throws { + let fd = socket(AF_UNIX, SOCK_STREAM, 0) + XCTAssertGreaterThanOrEqual(fd, 0, "Should create socket") + defer { close(fd) } + + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + withUnsafeMutablePointer(to: &addr.sun_path) { ptr in + ptr.withMemoryRebound(to: CChar.self, capacity: 104) { dest in + _ = testSocketPath.withCString { src in + strcpy(dest, src) + } + } + } + + let result = withUnsafePointer(to: &addr) { addrPtr in + addrPtr.withMemoryRebound(to: sockaddr.self, capacity: 1) { ptr in + connect(fd, ptr, socklen_t(MemoryLayout.size)) + } + } + XCTAssertEqual(result, 0, "Should connect to brainbar socket (errno: \(errno))") + } + + // MARK: - MCP Initialize handshake + + func testMCPInitializeOverSocket() throws { + let response = try sendMCPRequest([ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": [ + "protocolVersion": "2024-11-05", + "capabilities": [:] as [String: Any], + "clientInfo": ["name": "test", "version": "1.0"] + ] + ]) + + let result = response["result"] as? [String: Any] + XCTAssertNotNil(result) + XCTAssertEqual(result?["protocolVersion"] as? 
String, "2024-11-05") + } + + // MARK: - MCP tools/list over socket + + func testMCPToolsListOverSocket() throws { + // Must initialize first + _ = try sendMCPRequest([ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": [ + "protocolVersion": "2024-11-05", + "capabilities": [:] as [String: Any], + "clientInfo": ["name": "test", "version": "1.0"] + ] + ]) + + let response = try sendMCPRequest([ + "jsonrpc": "2.0", + "id": 2, + "method": "tools/list", + ]) + + let tools = (response["result"] as? [String: Any])?["tools"] as? [[String: Any]] + XCTAssertNotNil(tools) + XCTAssertEqual(tools?.count, 8) + } + + // MARK: - MCP tools/call brain_search over socket + + func testMCPBrainSearchOverSocket() throws { + // Initialize + _ = try sendMCPRequest([ + "jsonrpc": "2.0", "id": 1, "method": "initialize", + "params": ["protocolVersion": "2024-11-05", "capabilities": [:] as [String: Any], + "clientInfo": ["name": "test", "version": "1.0"]] + ]) + + let response = try sendMCPRequest([ + "jsonrpc": "2.0", + "id": 3, + "method": "tools/call", + "params": [ + "name": "brain_search", + "arguments": ["query": "test search"] + ] + ]) + + XCTAssertNil(response["error"], "brain_search should not error") + XCTAssertNotNil(response["result"]) + } + + // MARK: - Helper + + private func sendMCPRequest(_ request: [String: Any]) throws -> [String: Any] { + let fd = socket(AF_UNIX, SOCK_STREAM, 0) + guard fd >= 0 else { throw NSError(domain: "test", code: 1, userInfo: [NSLocalizedDescriptionKey: "socket() failed"]) } + defer { close(fd) } + + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + withUnsafeMutablePointer(to: &addr.sun_path) { ptr in + ptr.withMemoryRebound(to: CChar.self, capacity: 104) { dest in + _ = testSocketPath.withCString { src in + strcpy(dest, src) + } + } + } + + let connectResult = withUnsafePointer(to: &addr) { addrPtr in + addrPtr.withMemoryRebound(to: sockaddr.self, capacity: 1) { ptr in + connect(fd, ptr, 
socklen_t(MemoryLayout.size)) + } + } + guard connectResult == 0 else { + throw NSError(domain: "test", code: 2, userInfo: [NSLocalizedDescriptionKey: "connect() failed: errno \(errno)"]) + } + + // Send Content-Length framed request + let jsonData = try JSONSerialization.data(withJSONObject: request) + let header = "Content-Length: \(jsonData.count)\r\n\r\n" + var frame = Data(header.utf8) + frame.append(jsonData) + + let sent = frame.withUnsafeBytes { ptr in + write(fd, ptr.baseAddress!, frame.count) + } + guard sent == frame.count else { + throw NSError(domain: "test", code: 3, userInfo: [NSLocalizedDescriptionKey: "write() incomplete"]) + } + + // Read response with Content-Length framing + var buffer = Data() + var readBuf = [UInt8](repeating: 0, count: 65536) + let deadline = Date().addingTimeInterval(5.0) + + while Date() < deadline { + let n = read(fd, &readBuf, readBuf.count) + if n > 0 { + buffer.append(contentsOf: readBuf[0..= bodyStart + cl { + let bodyData = buffer[bodyStart..<(bodyStart + cl)] + return try JSONSerialization.jsonObject(with: bodyData) as? [String: Any] ?? [:] + } + } + } + } else if n == 0 { + break // EOF + } else if errno != EAGAIN && errno != EINTR { + break + } + Thread.sleep(forTimeInterval: 0.01) + } + + throw NSError(domain: "test", code: 4, userInfo: [NSLocalizedDescriptionKey: "Timeout reading response"]) + } +} diff --git a/brain-bar/build-app.sh b/brain-bar/build-app.sh new file mode 100755 index 0000000..ddbc9bb --- /dev/null +++ b/brain-bar/build-app.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# Build BrainBar as a proper macOS .app bundle. +# +# Usage: bash brain-bar/build-app.sh +# +# Output: /Applications/BrainBar.app + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PACKAGE_DIR="$SCRIPT_DIR" +BUNDLE_DIR="$SCRIPT_DIR/bundle" +APP_DIR="${BRAINBAR_APP_DIR:-$HOME/Applications/BrainBar.app}" + +echo "[build-app] Building BrainBar (release)..." 
+swift build -c release --package-path "$PACKAGE_DIR" + +# Find the built binary +BIN_DIR="$(swift build -c release --package-path "$PACKAGE_DIR" --show-bin-path)" +BINARY="$BIN_DIR/BrainBar" +if [ ! -f "$BINARY" ]; then + echo "[build-app] ERROR: Binary not found at $BINARY" + exit 1 +fi + +# Clean stale bundle +if [ -d "$APP_DIR" ]; then + echo "[build-app] Removing old bundle..." + rm -rf "$APP_DIR" +fi + +echo "[build-app] Creating .app bundle at $APP_DIR..." +mkdir -p "$APP_DIR/Contents/MacOS" +mkdir -p "$APP_DIR/Contents/Resources" + +cp "$BUNDLE_DIR/Info.plist" "$APP_DIR/Contents/" +cp "$BINARY" "$APP_DIR/Contents/MacOS/BrainBar" + +# Ad-hoc codesign +echo "[build-app] Signing..." +codesign --force --sign - "$APP_DIR" + +echo "[build-app] Done: $APP_DIR" +echo "[build-app] Socket: /tmp/brainbar.sock" +echo "[build-app] DB: ~/.local/share/brainlayer/brainlayer.db" diff --git a/brain-bar/bundle/Info.plist b/brain-bar/bundle/Info.plist new file mode 100644 index 0000000..28d8c3f --- /dev/null +++ b/brain-bar/bundle/Info.plist @@ -0,0 +1,26 @@ + + + + + CFBundleName + BrainBar + CFBundleDisplayName + BrainBar + CFBundleIdentifier + com.brainlayer.brainbar + CFBundleVersion + 1.0.0 + CFBundleShortVersionString + 1.0.0 + CFBundlePackageType + APPL + CFBundleExecutable + BrainBar + LSMinimumSystemVersion + 14.0 + LSUIElement + + NSHighResolutionCapable + + + diff --git a/brain-bar/bundle/com.brainlayer.brainbar.plist b/brain-bar/bundle/com.brainlayer.brainbar.plist new file mode 100644 index 0000000..4a8b823 --- /dev/null +++ b/brain-bar/bundle/com.brainlayer.brainbar.plist @@ -0,0 +1,27 @@ + + + + + Label + com.brainlayer.brainbar + ProgramArguments + + /Applications/BrainBar.app/Contents/MacOS/BrainBar + + RunAtLoad + + KeepAlive + + SuccessfulExit + + + StandardOutPath + /tmp/brainbar.stdout.log + StandardErrorPath + /tmp/brainbar.stderr.log + ProcessType + Interactive + ThrottleInterval + 10 + + diff --git a/src/brainlayer/ingest/codex.py 
b/src/brainlayer/ingest/codex.py index 6589346..2cf35bc 100644 --- a/src/brainlayer/ingest/codex.py +++ b/src/brainlayer/ingest/codex.py @@ -20,7 +20,6 @@ import json import logging import re -import sys from datetime import datetime, timezone from pathlib import Path from typing import Iterator, List, Optional @@ -378,6 +377,7 @@ def ingest_codex_session( if db_path is None: from ..paths import DEFAULT_DB_PATH + db_path = DEFAULT_DB_PATH from ..index_new import index_chunks_to_sqlite @@ -398,6 +398,7 @@ def ingest_codex_session( if session_id: try: from ..vector_store import VectorStore + with VectorStore(db_path) as store: store.store_session_context( session_id=session_id, @@ -443,10 +444,7 @@ def ingest_codex_dir( if since_days is not None: cutoff = datetime.now(timezone.utc).timestamp() - since_days * 86400 - jsonl_files = [ - f for f in jsonl_files - if f.stat().st_mtime >= cutoff - ] + jsonl_files = [f for f in jsonl_files if f.stat().st_mtime >= cutoff] if not jsonl_files: logger.info("No Codex session files found in %s", sessions_dir) @@ -455,17 +453,17 @@ def ingest_codex_dir( # Skip files already indexed (check DB for existing source_file entries) if not dry_run and db_path is None: from ..paths import DEFAULT_DB_PATH + db_path = DEFAULT_DB_PATH already_indexed: set[str] = set() if not dry_run and db_path and db_path.exists(): try: from ..vector_store import VectorStore + with VectorStore(db_path) as store: cursor = store._read_cursor() - rows = cursor.execute( - "SELECT DISTINCT source_file FROM chunks WHERE source = 'codex_cli'" - ) + rows = cursor.execute("SELECT DISTINCT source_file FROM chunks WHERE source = 'codex_cli'") already_indexed = {row[0] for row in rows} except Exception as exc: logger.debug("Could not check existing codex_cli chunks: %s", exc) @@ -503,15 +501,12 @@ def ingest_codex_dir( logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s") - parser = argparse.ArgumentParser( - description="Ingest Codex session 
transcripts into BrainLayer." - ) + parser = argparse.ArgumentParser(description="Ingest Codex session transcripts into BrainLayer.") parser.add_argument( "path", nargs="?", default=None, - help="Path to a Codex session JSONL file or sessions directory " - "(default: ~/.codex/sessions)", + help="Path to a Codex session JSONL file or sessions directory (default: ~/.codex/sessions)", ) parser.add_argument("--dry-run", action="store_true", help="Parse but do not write to DB") parser.add_argument("--project", default=None, help="Override project name") diff --git a/tests/test_ingest_codex.py b/tests/test_ingest_codex.py index fa9d600..d0931e5 100644 --- a/tests/test_ingest_codex.py +++ b/tests/test_ingest_codex.py @@ -1,11 +1,8 @@ """Tests for the Codex CLI session ingestion adapter.""" import json -import tempfile from pathlib import Path -import pytest - from brainlayer.ingest.codex import ( _classify_tool_output, _extract_input_text, @@ -16,7 +13,6 @@ parse_codex_session, ) - # --------------------------------------------------------------------------- # Fixtures — minimal Codex JSONL session files # --------------------------------------------------------------------------- @@ -227,10 +223,13 @@ def test_generic_file_read(self): class TestParseCodexSession: def test_real_user_message_is_parsed(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _user_msg("Fix the authentication bug in auth.py"), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _user_msg("Fix the authentication bug in auth.py"), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert entries[0]["content_type"] == "user_message" @@ -240,40 +239,52 @@ def test_real_user_message_is_parsed(self, tmp_path): def test_developer_messages_skipped(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _developer_msg("You are a coding agent. 
sandbox_mode is danger-full-access."), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _developer_msg("You are a coding agent. sandbox_mode is danger-full-access."), + ], + ) entries = list(parse_codex_session(f)) assert entries == [] def test_system_injections_skipped(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _user_msg("# AGENTS.md instructions for /Users/test/Gits/myproject\n\nYou are Codex..."), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _user_msg("# AGENTS.md instructions for /Users/test/Gits/myproject\n\nYou are Codex..."), + ], + ) entries = list(parse_codex_session(f)) assert entries == [] def test_environment_context_skipped(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _user_msg("\n/Users/test/Gits/myproject\n"), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _user_msg("\n/Users/test/Gits/myproject\n"), + ], + ) entries = list(parse_codex_session(f)) assert entries == [] def test_assistant_message_parsed(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _assistant_msg( - "I found the bug in the authentication middleware. " - "The token expiry check was comparing timestamps incorrectly." - ), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _assistant_msg( + "I found the bug in the authentication middleware. " + "The token expiry check was comparing timestamps incorrectly." 
+ ), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert entries[0]["content_type"] == "assistant_text" @@ -281,12 +292,13 @@ def test_assistant_message_parsed(self, tmp_path): def test_assistant_message_with_code_is_ai_code(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _assistant_msg( - "Here's the fix:\n```python\ndef check_token(t):\n return t.is_valid()\n```" - ), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _assistant_msg("Here's the fix:\n```python\ndef check_token(t):\n return t.is_valid()\n```"), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert entries[0]["content_type"] == "ai_code" @@ -294,10 +306,13 @@ def test_assistant_message_with_code_is_ai_code(self, tmp_path): def test_tool_output_parsed(self, tmp_path): f = tmp_path / "session.jsonl" tool_out = "Process exited with code 0\n" + "some output " * 10 - _write_jsonl(f, [ - _session_meta(), - _tool_output(tool_out), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _tool_output(tool_out), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert entries[0]["content_type"] == "file_read" @@ -306,47 +321,62 @@ def test_tool_json_output_decoded(self, tmp_path): f = tmp_path / "session.jsonl" text = "some tool result text that is definitely long enough to pass the minimum threshold" raw = json.dumps([{"type": "text", "text": text}]) - _write_jsonl(f, [ - _session_meta(), - _tool_output(raw), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _tool_output(raw), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert "some tool result text" in entries[0]["content"] def test_function_call_skipped(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _function_call("brain_search", '{"query": "auth bug"}'), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _function_call("brain_search", '{"query": "auth bug"}'), + ], + ) 
entries = list(parse_codex_session(f)) assert entries == [] def test_reasoning_skipped(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _reasoning(), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _reasoning(), + ], + ) entries = list(parse_codex_session(f)) assert entries == [] def test_short_user_message_filtered(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(), - _user_msg("ok"), # too short - ]) + _write_jsonl( + f, + [ + _session_meta(), + _user_msg("ok"), # too short + ], + ) entries = list(parse_codex_session(f)) assert entries == [] def test_session_metadata_propagated(self, tmp_path): f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(session_id="abc-123", cwd="/Users/test/Gits/golems"), - _user_msg("Implement the new feature for the dashboard"), - ]) + _write_jsonl( + f, + [ + _session_meta(session_id="abc-123", cwd="/Users/test/Gits/golems"), + _user_msg("Implement the new feature for the dashboard"), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 e = entries[0] @@ -361,10 +391,13 @@ def test_stack_trace_in_tool_output(self, tmp_path): ' File "auth.py", line 42, in check_token\n' "AssertionError: token expired\n" ) * 3 # ensure long enough - _write_jsonl(f, [ - _session_meta(), - _tool_output(trace), - ]) + _write_jsonl( + f, + [ + _session_meta(), + _tool_output(trace), + ], + ) entries = list(parse_codex_session(f)) assert len(entries) == 1 assert entries[0]["content_type"] == "stack_trace" @@ -372,21 +405,24 @@ def test_stack_trace_in_tool_output(self, tmp_path): def test_mixed_session(self, tmp_path): """Full session with system noise + real content.""" f = tmp_path / "session.jsonl" - _write_jsonl(f, [ - _session_meta(session_id="full-session", cwd="/Users/test/Gits/brainlayer"), - _developer_msg("sandbox_mode instructions ..."), - _user_msg("# AGENTS.md instructions for /Users/test/Gits/brainlayer\n..."), - 
_user_msg("\n/Users/test/Gits/brainlayer\n"), - _reasoning(), - _user_msg("Add a test for the codex ingestion adapter"), - _function_call("brain_search", '{"query": "codex adapter tests"}'), - _tool_output("No results found in BrainLayer for this query. " * 3), - _assistant_msg( - "I'll write the test. The key cases are: system injection filtering, " - "assistant message classification, and tool output parsing." - ), - _assistant_msg("Here's the implementation:\n```python\ndef test_codex():\n pass\n```"), - ]) + _write_jsonl( + f, + [ + _session_meta(session_id="full-session", cwd="/Users/test/Gits/brainlayer"), + _developer_msg("sandbox_mode instructions ..."), + _user_msg("# AGENTS.md instructions for /Users/test/Gits/brainlayer\n..."), + _user_msg("\n/Users/test/Gits/brainlayer\n"), + _reasoning(), + _user_msg("Add a test for the codex ingestion adapter"), + _function_call("brain_search", '{"query": "codex adapter tests"}'), + _tool_output("No results found in BrainLayer for this query. " * 3), + _assistant_msg( + "I'll write the test. The key cases are: system injection filtering, " + "assistant message classification, and tool output parsing." + ), + _assistant_msg("Here's the implementation:\n```python\ndef test_codex():\n pass\n```"), + ], + ) entries = list(parse_codex_session(f)) types = [e["content_type"] for e in entries] assert "user_message" in types