From 64ca1b5c923c933cc7b5d629179140d7dadb2f6c Mon Sep 17 00:00:00 2001 From: Olasunkanmi Oyinlola Date: Mon, 21 Apr 2025 17:40:42 +0800 Subject: [PATCH 1/2] feat(ui): Enhance chat history and model handling - Update Groq model configuration to max token size to 8192. - Implement chat history restore and history update publish on HistoryUpdated. - Update Gemini model configuration. - Fix model type selection. --- src/application/constant.ts | 2 +- src/commands/handler.ts | 76 ++++---------- src/emitter/interface.ts | 3 +- src/emitter/publisher.ts | 25 ++--- src/extension.ts | 99 ++++-------------- src/infrastructure/logger/logger.ts | 38 ++----- src/llms/gemini/gemini.ts | 144 +++++++-------------------- src/services/agent-state.ts | 26 ++--- src/services/file-storage.ts | 13 +-- src/webview-providers/anthropic.ts | 44 +++----- src/webview-providers/base.ts | 75 ++++---------- src/webview-providers/groq.ts | 50 +++------- src/webview-providers/manager.ts | 115 ++++++++------------- webviewUi/src/components/webview.tsx | 67 +++++-------- webviewUi/src/index.css | 9 +- 15 files changed, 224 insertions(+), 562 deletions(-) diff --git a/src/application/constant.ts b/src/application/constant.ts index a8d89e1..25e69dc 100644 --- a/src/application/constant.ts +++ b/src/application/constant.ts @@ -28,7 +28,7 @@ export enum COMMON { } export const GROQ_CONFIG = { temperature: 0.1, - max_tokens: 60000, + max_tokens: 8192, top_p: 1, stream: false, stop: ["thanks"], diff --git a/src/commands/handler.ts b/src/commands/handler.ts index 7a3d7d1..b69a52c 100644 --- a/src/commands/handler.ts +++ b/src/commands/handler.ts @@ -3,11 +3,7 @@ import Anthropic from "@anthropic-ai/sdk"; import { GenerativeModel, GoogleGenerativeAI } from "@google/generative-ai"; import Groq from "groq-sdk"; import * as vscode from "vscode"; -import { - APP_CONFIG, - COMMON, - generativeAiModels, -} from "../application/constant"; +import { APP_CONFIG, COMMON, generativeAiModels } from "../application/constant"; import { AnthropicWebViewProvider } from "../webview-providers/anthropic"; import { GeminiWebViewProvider } from "../webview-providers/gemini"; import { GroqWebViewProvider } from "../webview-providers/groq"; @@ -39,11 +35,10 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { private readonly xGrokApiKey: string; private readonly xGrokModel: string; private readonly logger: Logger; - // Todo Need to refactor. Only one instance of a model can be created at a time. Therefore no need to retrieve all model information, only retrieve the required model within the application constructor( private readonly action: string, _context: vscode.ExtensionContext, - errorMessage?: string, + errorMessage?: string ) { this.context = _context; this.error = errorMessage; @@ -74,15 +69,13 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return getConfigValue(configKey); } - protected createModel(): - | { generativeAi: string; model: any; modelName: string } - | undefined { + protected createModel(): { generativeAi: string; model: any; modelName: string } | undefined { try { let model; let modelName = ""; if (!this.generativeAi) { vscodeErrorMessage( - "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name", + "Configuration not found. Go to settings, search for Your coding buddy. 
Fill up the model and model name" ); } if (this.generativeAi === generativeAiModels.GROQ) { @@ -90,7 +83,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { modelName = this.groqModel; if (!apiKey || !modelName) { vscodeErrorMessage( - "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name", + "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name" ); } model = this.createGroqModel(apiKey); @@ -116,9 +109,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return { generativeAi: this.generativeAi, model, modelName }; } catch (error) { console.error("Error creating model:", error); - vscode.window.showErrorMessage( - "An error occurred while creating the model. Please try again.", - ); + vscode.window.showErrorMessage("An error occurred while creating the model. Please try again."); } } @@ -151,9 +142,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return new Groq({ apiKey }); } - protected async generateModelResponse( - text: string, - ): Promise { + protected async generateModelResponse(text: string): Promise { try { const activeModel = this.createModel(); if (!activeModel) { @@ -190,7 +179,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { if (!response) { throw new Error( - "Could not generate response. Check your settings, ensure the API keys and Model Name is added properly.", + "Could not generate response. Check your settings, ensure the API keys and Model Name is added properly." ); } if (this.action.includes("chart")) { @@ -199,9 +188,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return response; } catch (error) { this.logger.error("Error generating response:", error); - vscode.window.showErrorMessage( - "An error occurred while generating the response. Please try again.", - ); + vscode.window.showErrorMessage("An error occurred while generating the response. Please try again."); } } @@ -212,19 +199,12 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return inputString; } - async generateGeminiResponse( - model: any, - text: string, - ): Promise { + async generateGeminiResponse(model: any, text: string): Promise { const result = await model.generateContent(text); return result ? await result.response.text() : undefined; } - private async anthropicResponse( - model: Anthropic, - generativeAiModel: string, - userPrompt: string, - ) { + private async anthropicResponse(model: Anthropic, generativeAiModel: string, userPrompt: string) { try { const response = await model.messages.create({ model: generativeAiModel, @@ -238,15 +218,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { } } - private async groqResponse( - model: Groq, - prompt: string, - generativeAiModel: string, - ): Promise { + private async groqResponse(model: Groq, prompt: string, generativeAiModel: string): Promise { try { - const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) - ? Memory.get(COMMON.GROQ_CHAT_HISTORY) - : []; + const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? 
Memory.get(COMMON.GROQ_CHAT_HISTORY) : []; const params = { messages: [ ...chatHistory, @@ -258,8 +232,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { model: generativeAiModel, }; - const completion: Groq.Chat.ChatCompletion = - await model.chat.completions.create(params); + const completion: Groq.Chat.ChatCompletion = await model.chat.completions.create(params); return completion.choices[0]?.message?.content ?? undefined; } catch (error) { this.logger.error("Error generating response:", error); @@ -270,9 +243,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { abstract createPrompt(text?: string): any; - async generateResponse( - message?: string, - ): Promise { + async generateResponse(message?: string): Promise { this.logger.info(this.action); let prompt; const selectedCode = this.getSelectedWindowArea(); @@ -284,9 +255,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { if (message && selectedCode) { prompt = await this.createPrompt(`${message} \n ${selectedCode}`); } else { - message - ? (prompt = await this.createPrompt(message)) - : (prompt = await this.createPrompt(selectedCode)); + message ? (prompt = await this.createPrompt(message)) : (prompt = await this.createPrompt(selectedCode)); } if (!prompt) { @@ -369,9 +338,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { placeHolder: "Enter instructions for CodeBuddy", ignoreFocusOut: true, validateInput: (text) => { - return text === "" - ? "Enter instructions for CodeBuddy or press Escape to close chat box" - : null; + return text === "" ? "Enter instructions for CodeBuddy or press Escape to close chat box" : null; }, }); return userPrompt; @@ -383,9 +350,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { async execute(message?: string): Promise { try { let prompt: string | undefined; - const response = (await this.generateResponse( - prompt ? prompt : message, - )) as string; + const response = (await this.generateResponse(prompt ? 
prompt : message)) as string; if (!response) { vscode.window.showErrorMessage("model not reponding, try again later"); return; @@ -418,10 +383,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { break; } } catch (error) { - this.logger.error( - "Error while passing model response to the webview", - error, - ); + this.logger.error("Error while passing model response to the webview", error); } } } diff --git a/src/emitter/interface.ts b/src/emitter/interface.ts index ce60aa0..a285999 100644 --- a/src/emitter/interface.ts +++ b/src/emitter/interface.ts @@ -13,7 +13,8 @@ type AgentEventKeys = | "onFilesRetrieved" | "onStrategizing" | "onModelChange" - | "onModelChangeSuccess"; + | "onModelChangeSuccess" + | "onHistoryUpdated"; export type IAgentEventMap = Record; diff --git a/src/emitter/publisher.ts b/src/emitter/publisher.ts index 140cf91..99a045d 100644 --- a/src/emitter/publisher.ts +++ b/src/emitter/publisher.ts @@ -9,25 +9,16 @@ export class EventEmitter extends BaseEmitter> { onPromptGenerated: vscode.Event = this.createEvent("onQuery"); onThinking: vscode.Event = this.createEvent("onThinking"); onResponse: vscode.Event = this.createEvent("onResponse"); - onSecretChange: vscode.Event = - this.createEvent("onSecretChange"); + onSecretChange: vscode.Event = this.createEvent("onSecretChange"); onBootstrap: vscode.Event = this.createEvent("onBootstrap"); onFileUpload: vscode.Event = this.createEvent("onFileUpload"); - onFileProcessSuccess: vscode.Event = this.createEvent( - "onFileProcessSuccess", - ); - onActiveworkspaceUpdate: vscode.Event = this.createEvent( - "onActiveworkspaceUpdate", - ); - onFilesRetrieved: vscode.Event = - this.createEvent("onFilesRetrieved"); - onStrategizing: vscode.Event = - this.createEvent("onStrategizing"); - onModelChange: vscode.Event = - this.createEvent("onModelChange"); - onModelChangeSuccess: vscode.Event = this.createEvent( - "onModelChangeSuccess", - ); + onFileProcessSuccess: vscode.Event = this.createEvent("onFileProcessSuccess"); + onActiveworkspaceUpdate: vscode.Event = this.createEvent("onActiveworkspaceUpdate"); + onFilesRetrieved: vscode.Event = this.createEvent("onFilesRetrieved"); + onStrategizing: vscode.Event = this.createEvent("onStrategizing"); + onModelChange: vscode.Event = this.createEvent("onModelChange"); + onModelChangeSuccess: vscode.Event = this.createEvent("onModelChangeSuccess"); + onHistoryUpdated: vscode.Event = this.createEvent("onHistoryUpdated"); /** * Emits a generic event with specified status, message, and optional data. 
diff --git a/src/extension.ts b/src/extension.ts index 3c832e7..409bcb9 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -1,12 +1,7 @@ import * as fs from "fs"; import * as path from "path"; import * as vscode from "vscode"; -import { - APP_CONFIG, - generativeAiModels, - OLA_ACTIONS, - USER_MESSAGE, -} from "./application/constant"; +import { APP_CONFIG, generativeAiModels, OLA_ACTIONS, USER_MESSAGE } from "./application/constant"; import { Comments } from "./commands/comment"; import { ExplainCode } from "./commands/explain"; import { FixError } from "./commands/fixError"; @@ -72,7 +67,7 @@ async function connectToDatabase(context: vscode.ExtensionContext) { async function createFileDB(context: vscode.ExtensionContext) { try { - const fileUploader = new FileManager(context, "patterns"); + const fileUploader = new FileManager(context, "database"); const files = await fileUploader.getFiles(); if (!files?.find((file) => file.includes("dev.db"))) { await fileUploader.createFile("dev.db"); @@ -91,12 +86,10 @@ export async function activate(context: vscode.ExtensionContext) { const { apiKey, model } = getAPIKeyAndModel("gemini"); FileUploadService.initialize(apiKey); await credentials.initialize(context); - const session: vscode.AuthenticationSession | undefined = - await credentials.getSession(); + const session: vscode.AuthenticationSession | undefined = await credentials.getSession(); logger.info(`Logged into GitHub as ${session?.account.label}`); Memory.getInstance(); - // TODO This is broken. Need to Fix // const index = CodeIndexingService.createInstance(); // Get each of the folders and call the next line for each // const result = await index.buildFunctionStructureMap(); @@ -115,47 +108,17 @@ export async function activate(context: vscode.ExtensionContext) { generateCodeChart, inlineChat, } = OLA_ACTIONS; - const getComment = new Comments( - `${USER_MESSAGE} generates the code comments...`, - context, - ); - const getInLineChat = new InLineChat( - `${USER_MESSAGE} generates a response...`, - context, - ); - const generateOptimizeCode = new OptimizeCode( - `${USER_MESSAGE} optimizes the code...`, - context, - ); - const generateRefactoredCode = new RefactorCode( - `${USER_MESSAGE} refactors the code...`, - context, - ); - const explainCode = new ExplainCode( - `${USER_MESSAGE} explains the code...`, - context, - ); - const generateReview = new ReviewCode( - `${USER_MESSAGE} reviews the code...`, - context, - ); - const codeChartGenerator = new CodeChartGenerator( - `${USER_MESSAGE} creates the code chart...`, - context, - ); - const generateCommitMessage = new GenerateCommitMessage( - `${USER_MESSAGE} generates a commit message...`, - context, - ); - const generateInterviewQuestions = new InterviewMe( - `${USER_MESSAGE} generates interview questions...`, - context, - ); - - const generateUnitTests = new GenerateUnitTest( - `${USER_MESSAGE} generates unit tests...`, - context, - ); + const getComment = new Comments(`${USER_MESSAGE} generates the code comments...`, context); + const getInLineChat = new InLineChat(`${USER_MESSAGE} generates a response...`, context); + const generateOptimizeCode = new OptimizeCode(`${USER_MESSAGE} optimizes the code...`, context); + const generateRefactoredCode = new RefactorCode(`${USER_MESSAGE} refactors the code...`, context); + const explainCode = new ExplainCode(`${USER_MESSAGE} explains the code...`, context); + const generateReview = new ReviewCode(`${USER_MESSAGE} reviews the code...`, context); + const codeChartGenerator = new 
CodeChartGenerator(`${USER_MESSAGE} creates the code chart...`, context); + const generateCommitMessage = new GenerateCommitMessage(`${USER_MESSAGE} generates a commit message...`, context); + const generateInterviewQuestions = new InterviewMe(`${USER_MESSAGE} generates interview questions...`, context); + + const generateUnitTests = new GenerateUnitTest(`${USER_MESSAGE} generates unit tests...`, context); const actionMap = { [comment]: async () => await getComment.execute(), @@ -165,29 +128,21 @@ export async function activate(context: vscode.ExtensionContext) { [interviewMe]: async () => await generateInterviewQuestions.execute(), [generateUnitTest]: async () => await generateUnitTests.execute(), [fix]: (errorMessage: string) => - new FixError( - `${USER_MESSAGE} finds a solution to the error...`, - context, - errorMessage, - ).execute(errorMessage), + new FixError(`${USER_MESSAGE} finds a solution to the error...`, context, errorMessage).execute(errorMessage), [explain]: async () => await explainCode.execute(), - [commitMessage]: async () => - await generateCommitMessage.execute("commitMessage"), + [commitMessage]: async () => await generateCommitMessage.execute("commitMessage"), [generateCodeChart]: async () => await codeChartGenerator.execute(), [inlineChat]: async () => await getInLineChat.execute(), }; - let subscriptions: vscode.Disposable[] = Object.entries(actionMap).map( - ([action, handler]) => vscode.commands.registerCommand(action, handler), + let subscriptions: vscode.Disposable[] = Object.entries(actionMap).map(([action, handler]) => + vscode.commands.registerCommand(action, handler) ); const selectedGenerativeAiModel = getConfigValue("generativeAi.option"); const quickFix = new CodeActionsProvider(); - quickFixCodeAction = vscode.languages.registerCodeActionsProvider( - { scheme: "file", language: "*" }, - quickFix, - ); + quickFixCodeAction = vscode.languages.registerCodeActionsProvider({ scheme: "file", language: "*" }, quickFix); agentEventEmmitter = new EventEmitter(); @@ -231,22 +186,12 @@ export async function activate(context: vscode.ExtensionContext) { const modelConfig = modelConfigurations[selectedGenerativeAiModel]; const apiKey = getConfigValue(modelConfig.key); const apiModel = getConfigValue(modelConfig.model); - providerManager.initializeProvider( - selectedGenerativeAiModel, - apiKey, - apiModel, - ); + providerManager.initializeProvider(selectedGenerativeAiModel, apiKey, apiModel, true); } - context.subscriptions.push( - ...subscriptions, - quickFixCodeAction, - agentEventEmmitter, - ); + context.subscriptions.push(...subscriptions, quickFixCodeAction, agentEventEmmitter); } catch (error) { Memory.clear(); - vscode.window.showErrorMessage( - "An Error occured while setting up generative AI model", - ); + vscode.window.showErrorMessage("An Error occured while setting up generative AI model"); console.log(error); } } diff --git a/src/infrastructure/logger/logger.ts b/src/infrastructure/logger/logger.ts index a634c53..9f80485 100644 --- a/src/infrastructure/logger/logger.ts +++ b/src/infrastructure/logger/logger.ts @@ -33,11 +33,7 @@ export interface ILoggerConfig { export interface ITelemetry { recordEvent(name: string, properties?: Record): void; - recordMetric( - name: string, - value: number, - tags?: Record, - ): void; + recordMetric(name: string, value: number, tags?: Record): void; startSpan(name: string): ISpan; } @@ -63,20 +59,12 @@ export class Logger { static instance: Logger; constructor(private readonly module: string) {} - public static 
initialize( - module: string, - config: Partial, - telemetry?: ITelemetry, - ): Logger { + public static initialize(module: string, config: Partial, telemetry?: ITelemetry): Logger { Logger.config = { ...Logger.config, ...config }; if (Logger.config.enableFile && !Logger.config.filePath) { const workspaceFolder = vscode.workspace.workspaceFolders?.[0]; if (workspaceFolder) { - const logDir = path.join( - workspaceFolder.uri.fsPath, - ".codebuddy", - "logs", - ); + const logDir = path.join(workspaceFolder.uri.fsPath, ".codebuddy", "logs"); if (!fs.existsSync(logDir)) { fs.mkdirSync(logDir, { recursive: true }); } @@ -84,9 +72,7 @@ export class Logger { Logger.config.filePath = path.join(logDir, `codebuddy-${date}.log`); } } - if (!Logger.outputChannel) { - Logger.outputChannel = vscode.window.createOutputChannel("CodeBuddy"); - } + Logger.outputChannel ??= vscode.window.createOutputChannel("CodeBuddy"); Logger.telemetry = telemetry; Logger.sessionId = Logger.generateId(); Logger.setTraceId(Logger.generateId()); @@ -165,11 +151,7 @@ export class Logger { const event = this.formatLogEvent(level, message, data); this.logToConsole(event); this.logToFile(event); - if ( - level === LogLevel.INFO || - level === LogLevel.WARN || - level === LogLevel.ERROR - ) { + if (level === LogLevel.INFO || level === LogLevel.WARN || level === LogLevel.ERROR) { this.logToTelemetry(event); } } @@ -197,18 +179,12 @@ export class Logger { this.info(`${operation} completed in ${duration}ms`); if (Logger.telemetry) { - Logger.telemetry.recordMetric( - `duration.${this.module}.${operation}`, - duration, - ); + Logger.telemetry.recordMetric(`duration.${this.module}.${operation}`, duration); } }; } - public traceOperation( - operation: string, - fn: () => Promise, - ): Promise { + public traceOperation(operation: string, fn: () => Promise): Promise { const span = Logger.telemetry?.startSpan(`${this.module}.${operation}`); const endTimer = this.startTimer(operation); diff --git a/src/llms/gemini/gemini.ts b/src/llms/gemini/gemini.ts index 574e3ac..d090031 100644 --- a/src/llms/gemini/gemini.ts +++ b/src/llms/gemini/gemini.ts @@ -15,20 +15,13 @@ import { Memory } from "../../memory/base"; import { CodeBuddyToolProvider } from "../../tools/factory/tool"; import { createPrompt } from "../../utils/prompt"; import { BaseLLM } from "../base"; -import { - GeminiModelResponseType, - ILlmConfig, - GeminiLLMSnapShot, -} from "../interface"; +import { GeminiModelResponseType, ILlmConfig, GeminiLLMSnapShot } from "../interface"; import { Message } from "../message"; import { Logger } from "../../infrastructure/logger/logger"; import { GroqLLM } from "../groq/groq"; import { getAPIKeyAndModel } from "../../utils/utils"; -export class GeminiLLM - extends BaseLLM - implements vscode.Disposable -{ +export class GeminiLLM extends BaseLLM implements vscode.Disposable { private readonly generativeAi: GoogleGenerativeAI; private response: EmbedContentResponse | GenerateContentResult | undefined; protected readonly orchestrator: Orchestrator; @@ -59,11 +52,7 @@ export class GeminiLLM } private intializeDisposable(): void { - this.disposables.push( - vscode.workspace.onDidChangeConfiguration(() => - this.handleConfigurationChange() - ) - ); + this.disposables.push(vscode.workspace.onDidChangeConfiguration(() => this.handleConfigurationChange())); } // TODO handle configuration, when you introduce multiple LLM Agents @@ -72,9 +61,7 @@ export class GeminiLLM } static getInstance(config: ILlmConfig) { - if (!GeminiLLM.instance) { - 
GeminiLLM.instance = new GeminiLLM(config); - } + GeminiLLM.instance ??= new GeminiLLM(config); return GeminiLLM.instance; } @@ -90,10 +77,7 @@ export class GeminiLLM } } - public async generateText( - prompt: string, - instruction?: string - ): Promise { + public async generateText(prompt: string, instruction?: string): Promise { try { const model = this.getModel(); const result: GenerateContentResult = await model.generateContent(prompt); @@ -107,22 +91,14 @@ export class GeminiLLM getModel(modelParams?: Partial): GenerativeModel { try { - const model: GenerativeModel | undefined = - this.generativeAi.getGenerativeModel({ - model: this.config.model, - tools: modelParams?.tools ?? this.config.tools, - systemInstruction: - modelParams?.systemInstruction ?? this.config.systemInstruction, - generationConfig: { - stopSequences: [ - "Thank you", - "Done", - "End", - "stuck in a loop", - "loop", - ], - }, - }); + const model: GenerativeModel | undefined = this.generativeAi.getGenerativeModel({ + model: this.config.model, + tools: modelParams?.tools ?? this.config.tools, + systemInstruction: modelParams?.systemInstruction ?? this.config.systemInstruction, + generationConfig: { + stopSequences: ["Thank you", "Done", "End", "stuck in a loop", "loop"], + }, + }); if (!model) { throw new Error(`Error retrieving model ${this.config.model}`); } @@ -141,28 +117,19 @@ export class GeminiLLM }; } - async generateContentWithTools( - userInput: string - ): Promise { + async generateContentWithTools(userInput: string): Promise { try { - await this.buildChatHistory( - userInput, - undefined, - undefined, - undefined, - true - ); + await this.buildChatHistory(userInput, undefined, undefined, undefined, true); const prompt = createPrompt(userInput); const contents = Memory.get(COMMON.GEMINI_CHAT_HISTORY) as Content[]; const tools: any = this.getTools(); const model = this.getModel({ systemInstruction: prompt, tools }); - const generateContentResponse: GenerateContentResult = - await model.generateContent({ - contents, - toolConfig: { - functionCallingConfig: { mode: FunctionCallingMode.AUTO }, - }, - }); + const generateContentResponse: GenerateContentResult = await model.generateContent({ + contents, + toolConfig: { + functionCallingConfig: { mode: FunctionCallingMode.AUTO }, + }, + }); return generateContentResponse; } catch (error: any) { throw Error(error); @@ -183,10 +150,7 @@ export class GeminiLLM * @param userInput The original user input. * @returns A promise that resolves to the final result string or undefined if an error occurs. 
*/ - private async processToolCalls( - toolCalls: FunctionCall[], - userInput: string - ): Promise { + private async processToolCalls(toolCalls: FunctionCall[], userInput: string): Promise { let finalResult: string | undefined = undefined; try { let userQuery = userInput; @@ -194,8 +158,7 @@ export class GeminiLLM for (const functionCall of toolCalls) { try { - const functionResult = - await this.handleSingleFunctionCall(functionCall); + const functionResult = await this.handleSingleFunctionCall(functionCall); if (functionCall.name === "think") { const thought = functionResult?.content; @@ -216,13 +179,7 @@ export class GeminiLLM finalResult = userQuery; - await this.buildChatHistory( - userQuery, - functionCall.name, - functionResult, - undefined, - false - ); + await this.buildChatHistory(userQuery, functionCall.name, functionResult, undefined, false); const snapShot = this.createSnapShot({ lastQuery: userQuery, @@ -261,9 +218,7 @@ export class GeminiLLM } } - async processUserQuery( - userInput: string - ): Promise { + async processUserQuery(userInput: string): Promise { let finalResult: string | GenerateContentResult | undefined; let userQuery = userInput; const MAX_BASE_CALLS = 5; @@ -285,25 +240,13 @@ export class GeminiLLM while (callCount < this.calculateDynamicCallLimit(userQuery)) { const timeoutPromise = new Promise((_, reject) => - setTimeout( - () => reject(new Error("TImeout Exceeded")), - this.timeOutMs - ) + setTimeout(() => reject(new Error("TImeout Exceeded")), this.timeOutMs) ); const responsePromise = await this.generateContentWithTools(userQuery); - const result = (await Promise.race([ - responsePromise, - timeoutPromise, - ])) as GeminiModelResponseType; + const result = (await Promise.race([responsePromise, timeoutPromise])) as GeminiModelResponseType; this.response = result; if (result && "response" in result) { - const { - text, - usageMetadata, - functionCalls, - candidates, - promptFeedback, - } = result.response; + const { text, usageMetadata, functionCalls, candidates, promptFeedback } = result.response; if ((functionCalls?.()?.length ?? 0) === 0) { finalResult = text(); break; @@ -313,9 +256,7 @@ export class GeminiLLM const tokenCount = usageMetadata?.totalTokenCount ?? 0; const toolCalls = functionCalls ? functionCalls() : []; const currentCallSignatures = toolCalls - ? toolCalls - .map((call) => `${call.name}:${JSON.stringify(call.args)}`) - .join(";") + ? 
toolCalls.map((call) => `${call.name}:${JSON.stringify(call.args)}`).join(";") : ""; if (this.lastFunctionCalls.has(currentCallSignatures)) { finalResult = await this.groqLLM.generateText(userInput); @@ -328,9 +269,7 @@ export class GeminiLLM } this.lastFunctionCalls.add(currentCallSignatures); if (this.lastFunctionCalls.size > 10) { - this.lastFunctionCalls = new Set( - [...this.lastFunctionCalls].slice(-10) - ); + this.lastFunctionCalls = new Set([...this.lastFunctionCalls].slice(-10)); } if (toolCalls && toolCalls.length > 0) { finalResult = await this.processToolCalls(toolCalls, userQuery); @@ -364,10 +303,7 @@ export class GeminiLLM } const snapshot = Memory.get(COMMON.GEMINI_SNAPSHOT); if (snapshot?.length > 0) { - Memory.removeItems( - COMMON.GEMINI_SNAPSHOT, - Memory.get(COMMON.GEMINI_SNAPSHOT).length - ); + Memory.removeItems(COMMON.GEMINI_SNAPSHOT, Memory.get(COMMON.GEMINI_SNAPSHOT).length); } this.orchestrator.publish("onQuery", String(finalResult)); return finalResult; @@ -384,10 +320,7 @@ export class GeminiLLM } } - private async handleSingleFunctionCall( - functionCall: FunctionCall, - attempt: number = 0 - ): Promise { + private async handleSingleFunctionCall(functionCall: FunctionCall, attempt: number = 0): Promise { const MAX_RETRIES = 3; const args = functionCall.args as Record; const name = functionCall.name; @@ -406,10 +339,7 @@ export class GeminiLLM }; } catch (error: any) { if (attempt < MAX_RETRIES) { - console.warn( - `Retry attempt ${attempt + 1} for function ${name}`, - JSON.stringify({ error, args }) - ); + console.warn(`Retry attempt ${attempt + 1} for function ${name}`, JSON.stringify({ error, args })); return this.handleSingleFunctionCall(functionCall, attempt + 1); } } @@ -460,9 +390,7 @@ export class GeminiLLM }) ); - const observationResult = await chat.sendMessage( - `Tool result: ${JSON.stringify(functionResponse)}` - ); + const observationResult = await chat.sendMessage(`Tool result: ${JSON.stringify(functionResponse)}`); chatHistory.push( Message.of({ role: "user", @@ -524,9 +452,7 @@ export class GeminiLLM content: finalResult, }); - let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) - ? Memory.get(COMMON.GROQ_CHAT_HISTORY) - : [systemMessage]; + let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? 
Memory.get(COMMON.GROQ_CHAT_HISTORY) : [systemMessage]; chatHistory = [...chatHistory, systemMessage]; this.orchestrator.publish("onQuery", String(finalResult)); diff --git a/src/services/agent-state.ts b/src/services/agent-state.ts index 6c82535..d6af78c 100644 --- a/src/services/agent-state.ts +++ b/src/services/agent-state.ts @@ -5,7 +5,7 @@ import { FileStorage, IStorage } from "./file-storage"; export class AgentService { private static instance: AgentService; - private storage: IStorage; + private readonly storage: IStorage; private constructor(storage: IStorage) { this.storage = storage; @@ -19,9 +19,7 @@ export class AgentService { } async getState(agentId: string): Promise { - return this.storage.get( - `${COMMON.AGENT_STATE_PREFIX}_${agentId}`, - ); + return this.storage.get(`${COMMON.AGENT_STATE_PREFIX}_${agentId}`); } async saveState(agentId: string, state: AgentState): Promise { @@ -29,30 +27,18 @@ export class AgentService { } async getChatHistory(agentId: string): Promise { - return ( - (await this.storage.get( - `${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, - )) || [] - ); + return (await this.storage.get(`${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`)) || []; } async saveChatHistory(agentId: string, history: any[]): Promise { - return this.storage.set( - `${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, - history, - ); + return this.storage.set(`${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, history); } async getSnapshot(agentId: string): Promise { - return this.storage.get( - `${COMMON.SNAPSHOT_PREFIX}_${agentId}`, - ); + return this.storage.get(`${COMMON.SNAPSHOT_PREFIX}_${agentId}`); } - async saveSnapshot( - agentId: string, - snapshot: GeminiLLMSnapShot, - ): Promise { + async saveSnapshot(agentId: string, snapshot: GeminiLLMSnapShot): Promise { return this.storage.set(`${COMMON.SNAPSHOT_PREFIX}_${agentId}`, snapshot); } diff --git a/src/services/file-storage.ts b/src/services/file-storage.ts index 206164d..38f73d8 100644 --- a/src/services/file-storage.ts +++ b/src/services/file-storage.ts @@ -12,13 +12,10 @@ export interface IStorage { } export class FileStorage implements IStorage { - private storagePath: string; + private readonly storagePath: string; constructor() { - this.storagePath = path.join( - vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || "", - ".codebuddy", - ); + this.storagePath = path.join(vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? 
"", ".codebuddy"); if (!fs.existsSync(this.storagePath)) { fs.mkdirSync(this.storagePath, { recursive: true }); } @@ -45,11 +42,7 @@ export class FileStorage implements IStorage { async set(key: string, value: T): Promise { try { const filePath = this.getFilePath(key); - await fs.promises.writeFile( - filePath, - JSON.stringify(value, null, 2), - "utf-8", - ); + await fs.promises.writeFile(filePath, JSON.stringify(value, null, 2), "utf-8"); } catch (error) { console.error(`Error storing data for key ${key}:`, error); throw new Error(`Failed to store data: ${error}`); diff --git a/src/webview-providers/anthropic.ts b/src/webview-providers/anthropic.ts index 37a230d..af4e75d 100644 --- a/src/webview-providers/anthropic.ts +++ b/src/webview-providers/anthropic.ts @@ -1,16 +1,8 @@ import * as vscode from "vscode"; import { BaseWebViewProvider } from "./base"; -import { - COMMON, - generativeAiModels, - GROQ_CONFIG, -} from "../application/constant"; +import { COMMON, generativeAiModels, GROQ_CONFIG } from "../application/constant"; import Anthropic from "@anthropic-ai/sdk"; -import { - createAnthropicClient, - getGenerativeAiModel, - getXGroKBaseURL, -} from "../utils/utils"; +import { createAnthropicClient, getGenerativeAiModel, getXGroKBaseURL } from "../utils/utils"; import { Memory } from "../memory/base"; import { IMessageInput, Message } from "../llms/message"; @@ -22,16 +14,13 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext, - protected baseUrl?: string, + protected baseUrl?: string ) { super(extensionUri, apiKey, generativeAiModel, context); this.model = createAnthropicClient(this.apiKey, this.baseUrl); } - public async sendResponse( - response: string, - currentChat: string, - ): Promise { + public async sendResponse(response: string, currentChat: string): Promise { try { const type = currentChat === "bot" ? "bot-response" : "user-input"; if (currentChat === "bot") { @@ -39,25 +28,20 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { Message.of({ role: "assistant", content: response, - }), + }) ); } else { this.chatHistory.push( Message.of({ role: "user", content: response, - }), + }) ); } if (this.chatHistory.length === 2) { - const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) - ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) - : []; - Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [ - ...chatHistory, - ...this.chatHistory, - ]); + const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : []; + Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]); } return await this.currentWebView?.webview.postMessage({ type, @@ -68,13 +52,17 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { } } - async generateResponse(message: string): Promise { + async generateResponse(message: string, metaData?: any): Promise { try { + let context: string | undefined; + if (metaData?.context.length > 0) { + context = await this.getContext(metaData.context); + } const { max_tokens } = GROQ_CONFIG; if (getGenerativeAiModel() === generativeAiModels.GROK) { this.baseUrl = getXGroKBaseURL(); } - const userMessage = Message.of({ role: "user", content: message }); + const userMessage = Message.of({ role: "user", content: `${message} \n context: ${context}` }); let chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? 
Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : [userMessage]; @@ -94,9 +82,7 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { } catch (error) { console.error(error); Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, []); - vscode.window.showErrorMessage( - "Model not responding, please resend your question", - ); + vscode.window.showErrorMessage("Model not responding, please resend your question"); } } diff --git a/src/webview-providers/base.ts b/src/webview-providers/base.ts index d2a32c6..dca79a1 100644 --- a/src/webview-providers/base.ts +++ b/src/webview-providers/base.ts @@ -1,9 +1,6 @@ import * as vscode from "vscode"; import { Orchestrator } from "../agents/orchestrator"; -import { - FolderEntry, - IContextInfo, -} from "../application/interfaces/workspace.interface"; +import { FolderEntry, IContextInfo } from "../application/interfaces/workspace.interface"; import { IEventPayload } from "../emitter/interface"; import { Logger } from "../infrastructure/logger/logger"; import { AgentService } from "../services/agent-state"; @@ -31,7 +28,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { private readonly _extensionUri: vscode.Uri, protected readonly apiKey: string, protected readonly generativeAiModel: string, - context: vscode.ExtensionContext, + context: vscode.ExtensionContext ) { this.fileManager = FileManager.initialize(context, "files"); this.fileService = FileService.getInstance(); @@ -49,16 +46,10 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { this.orchestrator.onThinking(this.handleModelResponseEvent.bind(this)), this.orchestrator.onUpdate(this.handleModelResponseEvent.bind(this)), this.orchestrator.onError(this.handleModelResponseEvent.bind(this)), - this.orchestrator.onSecretChange( - this.handleModelResponseEvent.bind(this), - ), - this.orchestrator.onActiveworkspaceUpdate( - this.handleGenericEvents.bind(this), - ), + this.orchestrator.onSecretChange(this.handleModelResponseEvent.bind(this)), + this.orchestrator.onActiveworkspaceUpdate(this.handleGenericEvents.bind(this)), this.orchestrator.onFileUpload(this.handleModelResponseEvent.bind(this)), - this.orchestrator.onStrategizing( - this.handleModelResponseEvent.bind(this), - ), + this.orchestrator.onStrategizing(this.handleModelResponseEvent.bind(this)) ); } @@ -78,9 +69,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { webviewView.webview.options = webviewOptions; if (!this.apiKey) { - vscode.window.showErrorMessage( - "API key not configured. Check your settings.", - ); + vscode.window.showErrorMessage("API key not configured. 
Check your settings."); return; } @@ -93,10 +82,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { } private async setWebviewHtml(view: vscode.WebviewView): Promise { - view.webview.html = getWebviewContent( - this.currentWebView?.webview!, - this._extensionUri, - ); + view.webview.html = getWebviewContent(this.currentWebView?.webview!, this._extensionUri); } private async getFiles() { @@ -111,10 +97,8 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { private async publishWorkSpace(): Promise { try { - const filesAndDirs: IContextInfo = - await this.workspaceService.getContextInfo(true); - const workspaceFiles: Map | undefined = - filesAndDirs.workspaceFiles; + const filesAndDirs: IContextInfo = await this.workspaceService.getContextInfo(true); + const workspaceFiles: Map | undefined = filesAndDirs.workspaceFiles; if (!workspaceFiles) { this.logger.warn("There no files within the workspace"); return; @@ -136,17 +120,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { let response: any; switch (message.command) { case "user-input": - if (message.metaData.mode === "Agent") { - response = await this.generateResponse( - message.message, - message.metaData, - ); - } else { - response = await this.generateResponse( - message.message, - message.metaData, - ); - } + response = await this.generateResponse(message.message, message.metaData); if (response) { await this.sendResponse(formatText(response), "bot"); } @@ -161,16 +135,13 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { await this.orchestrator.publish("onModelChange", message); break; //Publish an event instead to prevent cyclic dependendency - // case "chat-history-import": - // await this.agentService.saveChatHistory( - // WebViewProviderManager.AgentId, - // JSON.parse(message.message), - // ); - // break; + case "messages-updated": + this.orchestrator.publish("onHistoryUpdated", message); + break; default: throw new Error("Unknown command"); } - }), + }) ); } catch (error) { this.logger.error("Message handler failed", error); @@ -186,20 +157,11 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { } public handleModelResponseEvent(event: IEventPayload) { - this.sendResponse( - formatText(event.message), - event.message === "folders" ? "bootstrap" : "bot", - ); + this.sendResponse(formatText(event.message), event.message === "folders" ? 
"bootstrap" : "bot"); } - abstract generateResponse( - message?: string, - metaData?: Record, - ): Promise; + abstract generateResponse(message?: string, metaData?: Record): Promise; - abstract sendResponse( - response: string, - currentChat?: string, - ): Promise; + abstract sendResponse(response: string, currentChat?: string): Promise; public dispose(): void { this.disposables.forEach((d) => d.dispose()); @@ -207,8 +169,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { async getContext(files: string[]) { try { - const filesContent: Map | undefined = - await this.fileService.getFilesContent(files); + const filesContent: Map | undefined = await this.fileService.getFilesContent(files); if (filesContent && filesContent.size > 0) { return Array.from(filesContent.values()).join("\n"); } diff --git a/src/webview-providers/groq.ts b/src/webview-providers/groq.ts index e203bf7..a48aa09 100644 --- a/src/webview-providers/groq.ts +++ b/src/webview-providers/groq.ts @@ -9,12 +9,7 @@ export class GroqWebViewProvider extends BaseWebViewProvider { chatHistory: IMessageInput[] = []; readonly model: Groq; private static instance: GroqWebViewProvider; - constructor( - extensionUri: vscode.Uri, - apiKey: string, - generativeAiModel: string, - context: vscode.ExtensionContext, - ) { + constructor(extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext) { super(extensionUri, apiKey, generativeAiModel, context); this.model = new Groq({ apiKey: this.apiKey, @@ -26,22 +21,14 @@ export class GroqWebViewProvider extends BaseWebViewProvider { extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, - context: vscode.ExtensionContext, + context: vscode.ExtensionContext ) { if (!GroqWebViewProvider.instance) { - GroqWebViewProvider.instance = new GroqWebViewProvider( - extensionUri, - apiKey, - generativeAiModel, - context, - ); + GroqWebViewProvider.instance = new GroqWebViewProvider(extensionUri, apiKey, generativeAiModel, context); } } - public async sendResponse( - response: string, - participant: string, - ): Promise { + public async sendResponse(response: string, participant: string): Promise { try { const type = participant === "bot" ? "bot-response" : "user-input"; if (participant === "bot") { @@ -49,24 +36,19 @@ export class GroqWebViewProvider extends BaseWebViewProvider { Message.of({ role: "system", content: response, - }), + }) ); } else { this.chatHistory.push( Message.of({ role: "user", content: response, - }), + }) ); } if (this.chatHistory.length === 2) { - const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) - ? Memory.get(COMMON.GROQ_CHAT_HISTORY) - : []; - Memory.set(COMMON.GROQ_CHAT_HISTORY, [ - ...chatHistory, - ...this.chatHistory, - ]); + const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : []; + Memory.set(COMMON.GROQ_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]); } // Once the agent task is done, map the memory into the llm brain. // Send the final answer to the webview here. 
@@ -79,13 +61,15 @@ export class GroqWebViewProvider extends BaseWebViewProvider { } } - async generateResponse(message: string): Promise { + async generateResponse(message: string, metaData?: any): Promise { try { + let context: string | undefined; + if (metaData?.context.length > 0) { + context = await this.getContext(metaData.context); + } const { temperature, max_tokens, top_p, stop } = GROQ_CONFIG; - const userMessage = Message.of({ role: "user", content: message }); - let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) - ? Memory.get(COMMON.GROQ_CHAT_HISTORY) - : [userMessage]; + const userMessage = Message.of({ role: "user", content: `${message} \n context: ${context}` }); + let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [userMessage]; chatHistory = [...chatHistory, userMessage]; @@ -104,9 +88,7 @@ export class GroqWebViewProvider extends BaseWebViewProvider { } catch (error) { console.error(error); Memory.set(COMMON.GROQ_CHAT_HISTORY, []); - vscode.window.showErrorMessage( - "Model not responding, please resend your question", - ); + vscode.window.showErrorMessage("Model not responding, please resend your question"); return; } } diff --git a/src/webview-providers/manager.ts b/src/webview-providers/manager.ts index 6f46eb0..052410f 100644 --- a/src/webview-providers/manager.ts +++ b/src/webview-providers/manager.ts @@ -4,7 +4,7 @@ import { generativeAiModels } from "../application/constant"; import { IEventPayload } from "../emitter/interface"; import { AgentService } from "../services/agent-state"; import { Logger } from "../services/telemetry"; -import { getAPIKeyAndModel, getConfigValue } from "../utils/utils"; +import { getAPIKeyAndModel } from "../utils/utils"; import { AnthropicWebViewProvider } from "./anthropic"; import { BaseWebViewProvider } from "./base"; import { DeepseekWebViewProvider } from "./deepseek"; @@ -20,34 +20,28 @@ export class WebViewProviderManager implements vscode.Disposable { extensionUri: vscode.Uri, apiKey: string, model: string, - context: vscode.ExtensionContext, + context: vscode.ExtensionContext ) => BaseWebViewProvider > = new Map(); private webviewView: vscode.WebviewView | undefined; private disposables: vscode.Disposable[] = []; - private viewProvider: vscode.WebviewViewProvider | undefined; private webviewViewProvider: vscode.WebviewViewProvider | undefined; protected readonly orchestrator: Orchestrator; private readonly agentService: AgentService; static AgentId = "agentId"; // TODO This is hardcoded for now,in upcoming versions, requests will be tagged to respective agents. 
private readonly logger = new Logger(WebViewProviderManager.name); - private constructor(private extensionContext: vscode.ExtensionContext) { + private constructor(private readonly extensionContext: vscode.ExtensionContext) { this.orchestrator = Orchestrator.getInstance(); this.agentService = AgentService.getInstance(); this.registerProviders(); - this.disposables.push( - this.orchestrator.onModelChange(this.handleModelChange.bind(this)), - ); + this.disposables.push(this.orchestrator.onModelChange(this.handleModelChange.bind(this))); + this.disposables.push(this.orchestrator.onHistoryUpdated(this.handleHistoryUpdate.bind(this))); } - public static getInstance( - extensionContext: vscode.ExtensionContext, - ): WebViewProviderManager { + public static getInstance(extensionContext: vscode.ExtensionContext): WebViewProviderManager { if (!WebViewProviderManager.instance) { - WebViewProviderManager.instance = new WebViewProviderManager( - extensionContext, - ); + WebViewProviderManager.instance = new WebViewProviderManager(extensionContext); } return WebViewProviderManager.instance; } @@ -55,18 +49,9 @@ export class WebViewProviderManager implements vscode.Disposable { private registerProviders(): void { this.providerRegistry.set(generativeAiModels.GEMINI, GeminiWebViewProvider); this.providerRegistry.set(generativeAiModels.GROQ, GroqWebViewProvider); - this.providerRegistry.set( - generativeAiModels.ANTHROPIC, - AnthropicWebViewProvider, - ); - this.providerRegistry.set( - generativeAiModels.GROK, - AnthropicWebViewProvider, - ); - this.providerRegistry.set( - generativeAiModels.DEEPSEEK, - DeepseekWebViewProvider, - ); + this.providerRegistry.set(generativeAiModels.ANTHROPIC, AnthropicWebViewProvider); + this.providerRegistry.set(generativeAiModels.GROK, AnthropicWebViewProvider); + this.providerRegistry.set(generativeAiModels.DEEPSEEK, DeepseekWebViewProvider); } registerWebViewProvider(): vscode.Disposable | undefined { @@ -83,39 +68,25 @@ export class WebViewProviderManager implements vscode.Disposable { const disposable = vscode.window.registerWebviewViewProvider( BaseWebViewProvider.viewId, this.webviewViewProvider, - { webviewOptions: { retainContextWhenHidden: true } }, + { webviewOptions: { retainContextWhenHidden: true } } ); this.disposables.push(disposable); return disposable; } } - // NOTE. 
This could be better off as modelName instead of modelType, once we are focusing on specific Models - private createProvider( - modelType: string, - apiKey: string, - model: string, - ): BaseWebViewProvider | undefined { - const providerClass = this.providerRegistry.get(modelType); + private createProvider(modelName: string, apiKey: string, model: string): BaseWebViewProvider | undefined { + const providerClass = this.providerRegistry.get(modelName); if (!providerClass) { - this.logger.warn(`Provider for model type ${modelType} not found`); + this.logger.warn(`Provider for model type ${modelName} not found`); return; } - return new providerClass( - this.extensionContext.extensionUri, - apiKey, - model, - this.extensionContext, - ); + return new providerClass(this.extensionContext.extensionUri, apiKey, model, this.extensionContext); } - private async switchProvider( - modelType: string, - apiKey: string, - model: string, - ): Promise { + private async switchProvider(modelName: string, apiKey: string, model: string, onload: boolean): Promise { try { - const newProvider = this.createProvider(modelType, apiKey, model); + const newProvider = this.createProvider(modelName, apiKey, model); if (!newProvider) { return; } @@ -127,7 +98,7 @@ export class WebViewProviderManager implements vscode.Disposable { if (this.webviewView) { await this.currentProvider.resolveWebviewView(this.webviewView); } - if (chatHistory?.length > 0) { + if (chatHistory.messages?.length > 0 && onload) { await this.restoreChatHistory(); } const webviewProviderDisposable = this.registerWebViewProvider(); @@ -139,8 +110,8 @@ export class WebViewProviderManager implements vscode.Disposable { "onModelChangeSuccess", JSON.stringify({ success: true, - modelType, - }), + modelName, + }) ); } catch (error: any) { this.logger.error(`Error switching provider: ${error}`); @@ -148,19 +119,15 @@ export class WebViewProviderManager implements vscode.Disposable { "onModelChangeSuccess", JSON.stringify({ success: false, - modelType, - }), + modelName, + }) ); throw new Error(error); } } - async initializeProvider( - modelType: string, - apiKey: string, - model: string, - ): Promise { - await this.switchProvider(modelType, apiKey, model); + async initializeProvider(modelName: string, apiKey: string, model: string, onload: boolean): Promise { + await this.switchProvider(modelName, apiKey, model, onload); } private async handleModelChange(event: IEventPayload): Promise { @@ -179,33 +146,37 @@ export class WebViewProviderManager implements vscode.Disposable { if (!apiKey) { this.logger.warn(`${modelName} APIkey is required`); } - await this.switchProvider(modelName, apiKey, model); + await this.switchProvider(modelName, apiKey, model, false); } catch (error: any) { this.logger.error("Error handling model change", error); throw new Error(error.message); } } - private async getCurrentHistory(): Promise { - const history = await this.agentService.getChatHistory( - WebViewProviderManager.AgentId, - ); + private async getCurrentHistory(): Promise { + const history = await this.agentService.getChatHistory(WebViewProviderManager.AgentId); return history; } private async restoreChatHistory() { const history = await this.getCurrentHistory(); - await this.webviewView?.webview.postMessage({ - type: "chat-history-export", - message: JSON.stringify(history), - }); + setTimeout(async () => { + const lastTenMessages = history.messages.slice(-10); + await this.webviewView?.webview.postMessage({ + type: "chat-history", + message: 
JSON.stringify(lastTenMessages), + }); + }, 6000); } async setCurrentHistory(data: any[]): Promise { - await this.agentService.saveChatHistory( - WebViewProviderManager.AgentId, - data, - ); + await this.agentService.saveChatHistory(WebViewProviderManager.AgentId, data); + } + + async handleHistoryUpdate({ type, message }: IEventPayload) { + if (message.command === "messages-updated" && message.messages?.length) { + await this.setCurrentHistory(message); + } } getCurrentProvider(): BaseWebViewProvider | undefined { @@ -217,9 +188,7 @@ export class WebViewProviderManager implements vscode.Disposable { this.currentProvider.dispose(); } this.disposables.forEach((d) => d.dispose()); - this.extensionContext.subscriptions.forEach((subscription) => - subscription.dispose(), - ); + this.extensionContext.subscriptions.forEach((subscription) => subscription.dispose()); this.disposables = []; } } diff --git a/webviewUi/src/components/webview.tsx b/webviewUi/src/components/webview.tsx index cf9eef0..dbe1a79 100644 --- a/webviewUi/src/components/webview.tsx +++ b/webviewUi/src/components/webview.tsx @@ -12,12 +12,7 @@ export interface ExtensionMessage { payload: any; } -import { - VSCodeTextArea, - VSCodePanels, - VSCodePanelTab, - VSCodePanelView, -} from "@vscode/webview-ui-toolkit/react"; +import { VSCodeTextArea, VSCodePanels, VSCodePanelTab, VSCodePanelView } from "@vscode/webview-ui-toolkit/react"; import type hljs from "highlight.js"; import { useEffect, useState } from "react"; import { codeBuddyMode, modelOptions } from "../constants/constant"; @@ -64,7 +59,6 @@ export const WebviewUI = () => { const message = event.data; switch (message.type) { case "bot-response": - setIsBotLoading(false); setMessages((prevMessages) => [ ...(prevMessages || []), { @@ -76,6 +70,15 @@ export const WebviewUI = () => { break; case "bootstrap": setFolders(message); + break; + case "chat-history": + try { + setMessages((prevMessages) => [...JSON.parse(message.message), ...(prevMessages || [])]); + } catch (error: any) { + console.log(error); + throw new Error(error.message); + } + break; case "error": console.error("Extension error", message.payload); @@ -89,11 +92,19 @@ export const WebviewUI = () => { }; window.addEventListener("message", messageHandler); highlightCodeBlocks(hljsApi, messages); + setIsBotLoading(false); return () => { window.removeEventListener("message", messageHandler); }; }, [messages]); + useEffect(() => { + vsCode.postMessage({ + command: "messages-updated", + messages, + }); + }, [messages]); + const handleContextChange = (value: string) => { setSelectedContext(value); }; @@ -129,8 +140,6 @@ export const WebviewUI = () => { }; const handleSend = () => { - // TODO Compare the data to be sent to the data recieved. - // TODO Since the folders will come through the parent, you can filter the values with the folders and files if (!userInput.trim()) return; setMessages((previousMessages) => [ @@ -179,10 +188,7 @@ export const WebviewUI = () => { OTHERS - +
@@ -190,11 +196,7 @@ export const WebviewUI = () => { msg.type === "bot" ? ( ) : ( - + ) )} {isBotLoading && } @@ -203,9 +205,9 @@ export const WebviewUI = () => {
- 1 - 2 - 3 + In Dev + In Dev + In Dev
{ Active workspace: - {selectedContext.includes(activeEditor) - ? "" - : `${activeEditor}`} - - - {Array.from( - new Set(selectedContext.split("@").join(", ").split(", ")) - ).join(" ")} + {selectedContext.includes(activeEditor) ? "" : `${activeEditor}`} + {Array.from(new Set(selectedContext.split("@").join(", ").split(", "))).join(" ")}
- + {
- + .codicon { display: flex; gap: 16px; padding: 20px; - background-color: var(--vscode-editor-background); + /* background-color: var(--vscode-editor-background); */ border-radius: 8px; max-width: 800px; margin-bottom: 15px; @@ -555,9 +555,7 @@ hr { } .url-grid-container { - display: grid; - grid-template-columns: repeat(3, 1fr); - /* Exactly 3 cards per row */ + display: inline; gap: 20px; padding: 16px; width: 100%; @@ -755,5 +753,4 @@ hr { .vscodePanels { height: 700px; -} - +} \ No newline at end of file From a86803b37d78cf82a8bb6f09fff9be543cac5206 Mon Sep 17 00:00:00 2001 From: Olasunkanmi Oyinlola Date: Mon, 21 Apr 2025 17:41:31 +0800 Subject: [PATCH 2/2] git push --set-upstream origin chat_history_handling --- src/commands/handler.ts | 75 +++++++++---- src/emitter/publisher.ts | 27 +++-- src/extension.ts | 97 +++++++++++++---- src/infrastructure/logger/logger.ts | 34 ++++-- src/llms/gemini/gemini.ts | 156 ++++++++++++++++++++-------- src/services/agent-state.ts | 24 ++++- src/services/file-storage.ts | 11 +- src/services/url-reranker.ts | 6 +- src/webview-providers/anthropic.ts | 46 ++++++-- src/webview-providers/base.ts | 59 ++++++++--- src/webview-providers/groq.ts | 52 +++++++--- src/webview-providers/manager.ts | 83 +++++++++++---- 12 files changed, 507 insertions(+), 163 deletions(-) diff --git a/src/commands/handler.ts b/src/commands/handler.ts index b69a52c..92ccc73 100644 --- a/src/commands/handler.ts +++ b/src/commands/handler.ts @@ -3,7 +3,11 @@ import Anthropic from "@anthropic-ai/sdk"; import { GenerativeModel, GoogleGenerativeAI } from "@google/generative-ai"; import Groq from "groq-sdk"; import * as vscode from "vscode"; -import { APP_CONFIG, COMMON, generativeAiModels } from "../application/constant"; +import { + APP_CONFIG, + COMMON, + generativeAiModels, +} from "../application/constant"; import { AnthropicWebViewProvider } from "../webview-providers/anthropic"; import { GeminiWebViewProvider } from "../webview-providers/gemini"; import { GroqWebViewProvider } from "../webview-providers/groq"; @@ -38,7 +42,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { constructor( private readonly action: string, _context: vscode.ExtensionContext, - errorMessage?: string + errorMessage?: string, ) { this.context = _context; this.error = errorMessage; @@ -69,13 +73,15 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return getConfigValue(configKey); } - protected createModel(): { generativeAi: string; model: any; modelName: string } | undefined { + protected createModel(): + | { generativeAi: string; model: any; modelName: string } + | undefined { try { let model; let modelName = ""; if (!this.generativeAi) { vscodeErrorMessage( - "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name" + "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name", ); } if (this.generativeAi === generativeAiModels.GROQ) { @@ -83,7 +89,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { modelName = this.groqModel; if (!apiKey || !modelName) { vscodeErrorMessage( - "Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name" + "Configuration not found. Go to settings, search for Your coding buddy. 
Fill up the model and model name", ); } model = this.createGroqModel(apiKey); @@ -109,7 +115,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return { generativeAi: this.generativeAi, model, modelName }; } catch (error) { console.error("Error creating model:", error); - vscode.window.showErrorMessage("An error occurred while creating the model. Please try again."); + vscode.window.showErrorMessage( + "An error occurred while creating the model. Please try again.", + ); } } @@ -142,7 +150,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return new Groq({ apiKey }); } - protected async generateModelResponse(text: string): Promise { + protected async generateModelResponse( + text: string, + ): Promise { try { const activeModel = this.createModel(); if (!activeModel) { @@ -179,7 +189,7 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { if (!response) { throw new Error( - "Could not generate response. Check your settings, ensure the API keys and Model Name is added properly." + "Could not generate response. Check your settings, ensure the API keys and Model Name is added properly.", ); } if (this.action.includes("chart")) { @@ -188,7 +198,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return response; } catch (error) { this.logger.error("Error generating response:", error); - vscode.window.showErrorMessage("An error occurred while generating the response. Please try again."); + vscode.window.showErrorMessage( + "An error occurred while generating the response. Please try again.", + ); } } @@ -199,12 +211,19 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { return inputString; } - async generateGeminiResponse(model: any, text: string): Promise { + async generateGeminiResponse( + model: any, + text: string, + ): Promise { const result = await model.generateContent(text); return result ? await result.response.text() : undefined; } - private async anthropicResponse(model: Anthropic, generativeAiModel: string, userPrompt: string) { + private async anthropicResponse( + model: Anthropic, + generativeAiModel: string, + userPrompt: string, + ) { try { const response = await model.messages.create({ model: generativeAiModel, @@ -218,9 +237,15 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { } } - private async groqResponse(model: Groq, prompt: string, generativeAiModel: string): Promise { + private async groqResponse( + model: Groq, + prompt: string, + generativeAiModel: string, + ): Promise { try { - const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : []; + const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) + ? Memory.get(COMMON.GROQ_CHAT_HISTORY) + : []; const params = { messages: [ ...chatHistory, @@ -232,7 +257,8 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { model: generativeAiModel, }; - const completion: Groq.Chat.ChatCompletion = await model.chat.completions.create(params); + const completion: Groq.Chat.ChatCompletion = + await model.chat.completions.create(params); return completion.choices[0]?.message?.content ?? 
undefined; } catch (error) { this.logger.error("Error generating response:", error); @@ -243,7 +269,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { abstract createPrompt(text?: string): any; - async generateResponse(message?: string): Promise { + async generateResponse( + message?: string, + ): Promise { this.logger.info(this.action); let prompt; const selectedCode = this.getSelectedWindowArea(); @@ -255,7 +283,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { if (message && selectedCode) { prompt = await this.createPrompt(`${message} \n ${selectedCode}`); } else { - message ? (prompt = await this.createPrompt(message)) : (prompt = await this.createPrompt(selectedCode)); + message + ? (prompt = await this.createPrompt(message)) + : (prompt = await this.createPrompt(selectedCode)); } if (!prompt) { @@ -338,7 +368,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { placeHolder: "Enter instructions for CodeBuddy", ignoreFocusOut: true, validateInput: (text) => { - return text === "" ? "Enter instructions for CodeBuddy or press Escape to close chat box" : null; + return text === "" + ? "Enter instructions for CodeBuddy or press Escape to close chat box" + : null; }, }); return userPrompt; @@ -350,7 +382,9 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { async execute(message?: string): Promise { try { let prompt: string | undefined; - const response = (await this.generateResponse(prompt ? prompt : message)) as string; + const response = (await this.generateResponse( + prompt ? prompt : message, + )) as string; if (!response) { vscode.window.showErrorMessage("model not reponding, try again later"); return; @@ -383,7 +417,10 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler { break; } } catch (error) { - this.logger.error("Error while passing model response to the webview", error); + this.logger.error( + "Error while passing model response to the webview", + error, + ); } } } diff --git a/src/emitter/publisher.ts b/src/emitter/publisher.ts index 99a045d..242e18d 100644 --- a/src/emitter/publisher.ts +++ b/src/emitter/publisher.ts @@ -9,16 +9,27 @@ export class EventEmitter extends BaseEmitter> { onPromptGenerated: vscode.Event = this.createEvent("onQuery"); onThinking: vscode.Event = this.createEvent("onThinking"); onResponse: vscode.Event = this.createEvent("onResponse"); - onSecretChange: vscode.Event = this.createEvent("onSecretChange"); + onSecretChange: vscode.Event = + this.createEvent("onSecretChange"); onBootstrap: vscode.Event = this.createEvent("onBootstrap"); onFileUpload: vscode.Event = this.createEvent("onFileUpload"); - onFileProcessSuccess: vscode.Event = this.createEvent("onFileProcessSuccess"); - onActiveworkspaceUpdate: vscode.Event = this.createEvent("onActiveworkspaceUpdate"); - onFilesRetrieved: vscode.Event = this.createEvent("onFilesRetrieved"); - onStrategizing: vscode.Event = this.createEvent("onStrategizing"); - onModelChange: vscode.Event = this.createEvent("onModelChange"); - onModelChangeSuccess: vscode.Event = this.createEvent("onModelChangeSuccess"); - onHistoryUpdated: vscode.Event = this.createEvent("onHistoryUpdated"); + onFileProcessSuccess: vscode.Event = this.createEvent( + "onFileProcessSuccess", + ); + onActiveworkspaceUpdate: vscode.Event = this.createEvent( + "onActiveworkspaceUpdate", + ); + onFilesRetrieved: vscode.Event = + this.createEvent("onFilesRetrieved"); + onStrategizing: vscode.Event = + 
this.createEvent("onStrategizing"); + onModelChange: vscode.Event = + this.createEvent("onModelChange"); + onModelChangeSuccess: vscode.Event = this.createEvent( + "onModelChangeSuccess", + ); + onHistoryUpdated: vscode.Event = + this.createEvent("onHistoryUpdated"); /** * Emits a generic event with specified status, message, and optional data. diff --git a/src/extension.ts b/src/extension.ts index 409bcb9..c15be4b 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -1,7 +1,12 @@ import * as fs from "fs"; import * as path from "path"; import * as vscode from "vscode"; -import { APP_CONFIG, generativeAiModels, OLA_ACTIONS, USER_MESSAGE } from "./application/constant"; +import { + APP_CONFIG, + generativeAiModels, + OLA_ACTIONS, + USER_MESSAGE, +} from "./application/constant"; import { Comments } from "./commands/comment"; import { ExplainCode } from "./commands/explain"; import { FixError } from "./commands/fixError"; @@ -86,7 +91,8 @@ export async function activate(context: vscode.ExtensionContext) { const { apiKey, model } = getAPIKeyAndModel("gemini"); FileUploadService.initialize(apiKey); await credentials.initialize(context); - const session: vscode.AuthenticationSession | undefined = await credentials.getSession(); + const session: vscode.AuthenticationSession | undefined = + await credentials.getSession(); logger.info(`Logged into GitHub as ${session?.account.label}`); Memory.getInstance(); @@ -108,17 +114,47 @@ export async function activate(context: vscode.ExtensionContext) { generateCodeChart, inlineChat, } = OLA_ACTIONS; - const getComment = new Comments(`${USER_MESSAGE} generates the code comments...`, context); - const getInLineChat = new InLineChat(`${USER_MESSAGE} generates a response...`, context); - const generateOptimizeCode = new OptimizeCode(`${USER_MESSAGE} optimizes the code...`, context); - const generateRefactoredCode = new RefactorCode(`${USER_MESSAGE} refactors the code...`, context); - const explainCode = new ExplainCode(`${USER_MESSAGE} explains the code...`, context); - const generateReview = new ReviewCode(`${USER_MESSAGE} reviews the code...`, context); - const codeChartGenerator = new CodeChartGenerator(`${USER_MESSAGE} creates the code chart...`, context); - const generateCommitMessage = new GenerateCommitMessage(`${USER_MESSAGE} generates a commit message...`, context); - const generateInterviewQuestions = new InterviewMe(`${USER_MESSAGE} generates interview questions...`, context); - - const generateUnitTests = new GenerateUnitTest(`${USER_MESSAGE} generates unit tests...`, context); + const getComment = new Comments( + `${USER_MESSAGE} generates the code comments...`, + context, + ); + const getInLineChat = new InLineChat( + `${USER_MESSAGE} generates a response...`, + context, + ); + const generateOptimizeCode = new OptimizeCode( + `${USER_MESSAGE} optimizes the code...`, + context, + ); + const generateRefactoredCode = new RefactorCode( + `${USER_MESSAGE} refactors the code...`, + context, + ); + const explainCode = new ExplainCode( + `${USER_MESSAGE} explains the code...`, + context, + ); + const generateReview = new ReviewCode( + `${USER_MESSAGE} reviews the code...`, + context, + ); + const codeChartGenerator = new CodeChartGenerator( + `${USER_MESSAGE} creates the code chart...`, + context, + ); + const generateCommitMessage = new GenerateCommitMessage( + `${USER_MESSAGE} generates a commit message...`, + context, + ); + const generateInterviewQuestions = new InterviewMe( + `${USER_MESSAGE} generates interview questions...`, + context, + ); + + 
const generateUnitTests = new GenerateUnitTest( + `${USER_MESSAGE} generates unit tests...`, + context, + ); const actionMap = { [comment]: async () => await getComment.execute(), @@ -128,21 +164,29 @@ export async function activate(context: vscode.ExtensionContext) { [interviewMe]: async () => await generateInterviewQuestions.execute(), [generateUnitTest]: async () => await generateUnitTests.execute(), [fix]: (errorMessage: string) => - new FixError(`${USER_MESSAGE} finds a solution to the error...`, context, errorMessage).execute(errorMessage), + new FixError( + `${USER_MESSAGE} finds a solution to the error...`, + context, + errorMessage, + ).execute(errorMessage), [explain]: async () => await explainCode.execute(), - [commitMessage]: async () => await generateCommitMessage.execute("commitMessage"), + [commitMessage]: async () => + await generateCommitMessage.execute("commitMessage"), [generateCodeChart]: async () => await codeChartGenerator.execute(), [inlineChat]: async () => await getInLineChat.execute(), }; - let subscriptions: vscode.Disposable[] = Object.entries(actionMap).map(([action, handler]) => - vscode.commands.registerCommand(action, handler) + let subscriptions: vscode.Disposable[] = Object.entries(actionMap).map( + ([action, handler]) => vscode.commands.registerCommand(action, handler), ); const selectedGenerativeAiModel = getConfigValue("generativeAi.option"); const quickFix = new CodeActionsProvider(); - quickFixCodeAction = vscode.languages.registerCodeActionsProvider({ scheme: "file", language: "*" }, quickFix); + quickFixCodeAction = vscode.languages.registerCodeActionsProvider( + { scheme: "file", language: "*" }, + quickFix, + ); agentEventEmmitter = new EventEmitter(); @@ -186,12 +230,23 @@ export async function activate(context: vscode.ExtensionContext) { const modelConfig = modelConfigurations[selectedGenerativeAiModel]; const apiKey = getConfigValue(modelConfig.key); const apiModel = getConfigValue(modelConfig.model); - providerManager.initializeProvider(selectedGenerativeAiModel, apiKey, apiModel, true); + providerManager.initializeProvider( + selectedGenerativeAiModel, + apiKey, + apiModel, + true, + ); } - context.subscriptions.push(...subscriptions, quickFixCodeAction, agentEventEmmitter); + context.subscriptions.push( + ...subscriptions, + quickFixCodeAction, + agentEventEmmitter, + ); } catch (error) { Memory.clear(); - vscode.window.showErrorMessage("An Error occured while setting up generative AI model"); + vscode.window.showErrorMessage( + "An Error occured while setting up generative AI model", + ); console.log(error); } } diff --git a/src/infrastructure/logger/logger.ts b/src/infrastructure/logger/logger.ts index 9f80485..7beb879 100644 --- a/src/infrastructure/logger/logger.ts +++ b/src/infrastructure/logger/logger.ts @@ -33,7 +33,11 @@ export interface ILoggerConfig { export interface ITelemetry { recordEvent(name: string, properties?: Record): void; - recordMetric(name: string, value: number, tags?: Record): void; + recordMetric( + name: string, + value: number, + tags?: Record, + ): void; startSpan(name: string): ISpan; } @@ -59,12 +63,20 @@ export class Logger { static instance: Logger; constructor(private readonly module: string) {} - public static initialize(module: string, config: Partial, telemetry?: ITelemetry): Logger { + public static initialize( + module: string, + config: Partial, + telemetry?: ITelemetry, + ): Logger { Logger.config = { ...Logger.config, ...config }; if (Logger.config.enableFile && !Logger.config.filePath) { const 
workspaceFolder = vscode.workspace.workspaceFolders?.[0]; if (workspaceFolder) { - const logDir = path.join(workspaceFolder.uri.fsPath, ".codebuddy", "logs"); + const logDir = path.join( + workspaceFolder.uri.fsPath, + ".codebuddy", + "logs", + ); if (!fs.existsSync(logDir)) { fs.mkdirSync(logDir, { recursive: true }); } @@ -151,7 +163,11 @@ export class Logger { const event = this.formatLogEvent(level, message, data); this.logToConsole(event); this.logToFile(event); - if (level === LogLevel.INFO || level === LogLevel.WARN || level === LogLevel.ERROR) { + if ( + level === LogLevel.INFO || + level === LogLevel.WARN || + level === LogLevel.ERROR + ) { this.logToTelemetry(event); } } @@ -179,12 +195,18 @@ export class Logger { this.info(`${operation} completed in ${duration}ms`); if (Logger.telemetry) { - Logger.telemetry.recordMetric(`duration.${this.module}.${operation}`, duration); + Logger.telemetry.recordMetric( + `duration.${this.module}.${operation}`, + duration, + ); } }; } - public traceOperation(operation: string, fn: () => Promise): Promise { + public traceOperation( + operation: string, + fn: () => Promise, + ): Promise { const span = Logger.telemetry?.startSpan(`${this.module}.${operation}`); const endTimer = this.startTimer(operation); diff --git a/src/llms/gemini/gemini.ts b/src/llms/gemini/gemini.ts index d090031..1fcc0a3 100644 --- a/src/llms/gemini/gemini.ts +++ b/src/llms/gemini/gemini.ts @@ -15,13 +15,20 @@ import { Memory } from "../../memory/base"; import { CodeBuddyToolProvider } from "../../tools/factory/tool"; import { createPrompt } from "../../utils/prompt"; import { BaseLLM } from "../base"; -import { GeminiModelResponseType, ILlmConfig, GeminiLLMSnapShot } from "../interface"; +import { + GeminiModelResponseType, + ILlmConfig, + GeminiLLMSnapShot, +} from "../interface"; import { Message } from "../message"; import { Logger } from "../../infrastructure/logger/logger"; import { GroqLLM } from "../groq/groq"; import { getAPIKeyAndModel } from "../../utils/utils"; -export class GeminiLLM extends BaseLLM implements vscode.Disposable { +export class GeminiLLM + extends BaseLLM + implements vscode.Disposable +{ private readonly generativeAi: GoogleGenerativeAI; private response: EmbedContentResponse | GenerateContentResult | undefined; protected readonly orchestrator: Orchestrator; @@ -52,7 +59,11 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp } private intializeDisposable(): void { - this.disposables.push(vscode.workspace.onDidChangeConfiguration(() => this.handleConfigurationChange())); + this.disposables.push( + vscode.workspace.onDidChangeConfiguration(() => + this.handleConfigurationChange(), + ), + ); } // TODO handle configuration, when you introduce multiple LLM Agents @@ -77,7 +88,10 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp } } - public async generateText(prompt: string, instruction?: string): Promise { + public async generateText( + prompt: string, + instruction?: string, + ): Promise { try { const model = this.getModel(); const result: GenerateContentResult = await model.generateContent(prompt); @@ -91,14 +105,22 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp getModel(modelParams?: Partial): GenerativeModel { try { - const model: GenerativeModel | undefined = this.generativeAi.getGenerativeModel({ - model: this.config.model, - tools: modelParams?.tools ?? this.config.tools, - systemInstruction: modelParams?.systemInstruction ?? 
this.config.systemInstruction, - generationConfig: { - stopSequences: ["Thank you", "Done", "End", "stuck in a loop", "loop"], - }, - }); + const model: GenerativeModel | undefined = + this.generativeAi.getGenerativeModel({ + model: this.config.model, + tools: modelParams?.tools ?? this.config.tools, + systemInstruction: + modelParams?.systemInstruction ?? this.config.systemInstruction, + generationConfig: { + stopSequences: [ + "Thank you", + "Done", + "End", + "stuck in a loop", + "loop", + ], + }, + }); if (!model) { throw new Error(`Error retrieving model ${this.config.model}`); } @@ -117,19 +139,28 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp }; } - async generateContentWithTools(userInput: string): Promise { + async generateContentWithTools( + userInput: string, + ): Promise { try { - await this.buildChatHistory(userInput, undefined, undefined, undefined, true); + await this.buildChatHistory( + userInput, + undefined, + undefined, + undefined, + true, + ); const prompt = createPrompt(userInput); const contents = Memory.get(COMMON.GEMINI_CHAT_HISTORY) as Content[]; const tools: any = this.getTools(); const model = this.getModel({ systemInstruction: prompt, tools }); - const generateContentResponse: GenerateContentResult = await model.generateContent({ - contents, - toolConfig: { - functionCallingConfig: { mode: FunctionCallingMode.AUTO }, - }, - }); + const generateContentResponse: GenerateContentResult = + await model.generateContent({ + contents, + toolConfig: { + functionCallingConfig: { mode: FunctionCallingMode.AUTO }, + }, + }); return generateContentResponse; } catch (error: any) { throw Error(error); @@ -150,7 +181,10 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp * @param userInput The original user input. * @returns A promise that resolves to the final result string or undefined if an error occurs. */ - private async processToolCalls(toolCalls: FunctionCall[], userInput: string): Promise { + private async processToolCalls( + toolCalls: FunctionCall[], + userInput: string, + ): Promise { let finalResult: string | undefined = undefined; try { let userQuery = userInput; @@ -158,7 +192,8 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp for (const functionCall of toolCalls) { try { - const functionResult = await this.handleSingleFunctionCall(functionCall); + const functionResult = + await this.handleSingleFunctionCall(functionCall); if (functionCall.name === "think") { const thought = functionResult?.content; @@ -179,7 +214,13 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp finalResult = userQuery; - await this.buildChatHistory(userQuery, functionCall.name, functionResult, undefined, false); + await this.buildChatHistory( + userQuery, + functionCall.name, + functionResult, + undefined, + false, + ); const snapShot = this.createSnapShot({ lastQuery: userQuery, @@ -196,12 +237,12 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp const retry = await vscode.window.showErrorMessage( `Function call failed: ${error.message}. Retry or abort?`, "Retry", - "Abort" + "Abort", ); if (retry === "Retry") { finalResult = await this.fallBackToGroq( - `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution` + `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`, ); } else { finalResult = `Function call error: ${error.message}. 
Falling back to last response.`; @@ -213,12 +254,14 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp } catch (error) { console.error("Error processing tool calls", error); finalResult = await this.fallBackToGroq( - `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution` + `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`, ); } } - async processUserQuery(userInput: string): Promise { + async processUserQuery( + userInput: string, + ): Promise { let finalResult: string | GenerateContentResult | undefined; let userQuery = userInput; const MAX_BASE_CALLS = 5; @@ -240,13 +283,25 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp while (callCount < this.calculateDynamicCallLimit(userQuery)) { const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error("TImeout Exceeded")), this.timeOutMs) + setTimeout( + () => reject(new Error("TImeout Exceeded")), + this.timeOutMs, + ), ); const responsePromise = await this.generateContentWithTools(userQuery); - const result = (await Promise.race([responsePromise, timeoutPromise])) as GeminiModelResponseType; + const result = (await Promise.race([ + responsePromise, + timeoutPromise, + ])) as GeminiModelResponseType; this.response = result; if (result && "response" in result) { - const { text, usageMetadata, functionCalls, candidates, promptFeedback } = result.response; + const { + text, + usageMetadata, + functionCalls, + candidates, + promptFeedback, + } = result.response; if ((functionCalls?.()?.length ?? 0) === 0) { finalResult = text(); break; @@ -256,20 +311,24 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp const tokenCount = usageMetadata?.totalTokenCount ?? 0; const toolCalls = functionCalls ? functionCalls() : []; const currentCallSignatures = toolCalls - ? toolCalls.map((call) => `${call.name}:${JSON.stringify(call.args)}`).join(";") + ? 
toolCalls + .map((call) => `${call.name}:${JSON.stringify(call.args)}`) + .join(";") : ""; if (this.lastFunctionCalls.has(currentCallSignatures)) { finalResult = await this.groqLLM.generateText(userInput); if (finalResult) { finalResult = await this.fallBackToGroq( - `User Input: ${this.userQuery} \n Plans: ${userInput} Write production ready code to demonstrate your solution` + `User Input: ${this.userQuery} \n Plans: ${userInput} Write production ready code to demonstrate your solution`, ); return finalResult; } } this.lastFunctionCalls.add(currentCallSignatures); if (this.lastFunctionCalls.size > 10) { - this.lastFunctionCalls = new Set([...this.lastFunctionCalls].slice(-10)); + this.lastFunctionCalls = new Set( + [...this.lastFunctionCalls].slice(-10), + ); } if (toolCalls && toolCalls.length > 0) { finalResult = await this.processToolCalls(toolCalls, userQuery); @@ -303,7 +362,10 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp } const snapshot = Memory.get(COMMON.GEMINI_SNAPSHOT); if (snapshot?.length > 0) { - Memory.removeItems(COMMON.GEMINI_SNAPSHOT, Memory.get(COMMON.GEMINI_SNAPSHOT).length); + Memory.removeItems( + COMMON.GEMINI_SNAPSHOT, + Memory.get(COMMON.GEMINI_SNAPSHOT).length, + ); } this.orchestrator.publish("onQuery", String(finalResult)); return finalResult; @@ -314,13 +376,16 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp // ); console.log("Error processing user query", error); finalResult = await this.fallBackToGroq( - `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution` + `User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`, ); console.log("Model not responding at this time, please try again", error); } } - private async handleSingleFunctionCall(functionCall: FunctionCall, attempt: number = 0): Promise { + private async handleSingleFunctionCall( + functionCall: FunctionCall, + attempt: number = 0, + ): Promise { const MAX_RETRIES = 3; const args = functionCall.args as Record; const name = functionCall.name; @@ -339,7 +404,10 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp }; } catch (error: any) { if (attempt < MAX_RETRIES) { - console.warn(`Retry attempt ${attempt + 1} for function ${name}`, JSON.stringify({ error, args })); + console.warn( + `Retry attempt ${attempt + 1} for function ${name}`, + JSON.stringify({ error, args }), + ); return this.handleSingleFunctionCall(functionCall, attempt + 1); } } @@ -361,7 +429,7 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp functionCall?: any, functionResponse?: any, chat?: ChatSession, - isInitialQuery: boolean = false + isInitialQuery: boolean = false, ): Promise { // Check if it makes sense to kind of seperate agent and Edit Mode memory, when switching. 
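// A minimal sketch of the timeout race processUserQuery relies on above: the
// model call is raced against a timer so a stalled request cannot hang the
// tool-call loop. The helper name is illustrative; note that the un-awaited
// promise must be passed to Promise.race, otherwise the timer has nothing to
// race against.
function withTimeout<T>(work: Promise<T>, timeOutMs: number): Promise<T> {
  const timeout = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error("Timeout exceeded")), timeOutMs),
  );
  return Promise.race([work, timeout]);
}

// usage: const result = await withTimeout(this.generateContentWithTools(userQuery), this.timeOutMs);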
let chatHistory: any = Memory.get(COMMON.GEMINI_CHAT_HISTORY) || []; @@ -387,15 +455,17 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp Message.of({ role: "model", parts: [{ functionCall }], - }) + }), ); - const observationResult = await chat.sendMessage(`Tool result: ${JSON.stringify(functionResponse)}`); + const observationResult = await chat.sendMessage( + `Tool result: ${JSON.stringify(functionResponse)}`, + ); chatHistory.push( Message.of({ role: "user", parts: [{ text: observationResult.response.text() }], - }) + }), ); } if (chatHistory.length > 50) chatHistory = chatHistory.slice(-50); @@ -452,7 +522,9 @@ export class GeminiLLM extends BaseLLM implements vscode.Disp content: finalResult, }); - let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [systemMessage]; + let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) + ? Memory.get(COMMON.GROQ_CHAT_HISTORY) + : [systemMessage]; chatHistory = [...chatHistory, systemMessage]; this.orchestrator.publish("onQuery", String(finalResult)); diff --git a/src/services/agent-state.ts b/src/services/agent-state.ts index d6af78c..534add2 100644 --- a/src/services/agent-state.ts +++ b/src/services/agent-state.ts @@ -19,7 +19,9 @@ export class AgentService { } async getState(agentId: string): Promise { - return this.storage.get(`${COMMON.AGENT_STATE_PREFIX}_${agentId}`); + return this.storage.get( + `${COMMON.AGENT_STATE_PREFIX}_${agentId}`, + ); } async saveState(agentId: string, state: AgentState): Promise { @@ -27,18 +29,30 @@ export class AgentService { } async getChatHistory(agentId: string): Promise { - return (await this.storage.get(`${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`)) || []; + return ( + (await this.storage.get( + `${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, + )) || [] + ); } async saveChatHistory(agentId: string, history: any[]): Promise { - return this.storage.set(`${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, history); + return this.storage.set( + `${COMMON.CHAT_HISTORY_PREFIX}_${agentId}`, + history, + ); } async getSnapshot(agentId: string): Promise { - return this.storage.get(`${COMMON.SNAPSHOT_PREFIX}_${agentId}`); + return this.storage.get( + `${COMMON.SNAPSHOT_PREFIX}_${agentId}`, + ); } - async saveSnapshot(agentId: string, snapshot: GeminiLLMSnapShot): Promise { + async saveSnapshot( + agentId: string, + snapshot: GeminiLLMSnapShot, + ): Promise { return this.storage.set(`${COMMON.SNAPSHOT_PREFIX}_${agentId}`, snapshot); } diff --git a/src/services/file-storage.ts b/src/services/file-storage.ts index 38f73d8..90f0ef2 100644 --- a/src/services/file-storage.ts +++ b/src/services/file-storage.ts @@ -15,7 +15,10 @@ export class FileStorage implements IStorage { private readonly storagePath: string; constructor() { - this.storagePath = path.join(vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? "", ".codebuddy"); + this.storagePath = path.join( + vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? 
"", + ".codebuddy", + ); if (!fs.existsSync(this.storagePath)) { fs.mkdirSync(this.storagePath, { recursive: true }); } @@ -42,7 +45,11 @@ export class FileStorage implements IStorage { async set(key: string, value: T): Promise { try { const filePath = this.getFilePath(key); - await fs.promises.writeFile(filePath, JSON.stringify(value, null, 2), "utf-8"); + await fs.promises.writeFile( + filePath, + JSON.stringify(value, null, 2), + "utf-8", + ); } catch (error) { console.error(`Error storing data for key ${key}:`, error); throw new Error(`Failed to store data: ${error}`); diff --git a/src/services/url-reranker.ts b/src/services/url-reranker.ts index df4f3d7..e799ae2 100644 --- a/src/services/url-reranker.ts +++ b/src/services/url-reranker.ts @@ -66,7 +66,7 @@ export class UrlReranker { if (!title) return 0; const matches = UrlReranker.KEYWORDS.filter((keyword) => - title.toLowerCase().includes(keyword.toLowerCase()) + title.toLowerCase().includes(keyword.toLowerCase()), ); return matches.length / UrlReranker.KEYWORDS.length; @@ -97,7 +97,7 @@ export class UrlReranker { const codeBlockCount = this.countCodeBlocks(metadata.content); const hasAdequateExplanation = this.hasAdequateExplanation( - metadata.content + metadata.content, ); return codeBlockCount + (hasAdequateExplanation ? 1 : 0); @@ -130,7 +130,7 @@ export class UrlReranker { */ calculateFinalScore(metadata: IPageMetada): number { const titleRelevanceScore = this.calculateTitleRelevanceScore( - metadata.title ?? "" + metadata.title ?? "", ); const reputationScore = this.calculateSourceReputationScore(metadata); const contentQualityScore = this.calculateContentQualityScore(metadata); diff --git a/src/webview-providers/anthropic.ts b/src/webview-providers/anthropic.ts index af4e75d..6354168 100644 --- a/src/webview-providers/anthropic.ts +++ b/src/webview-providers/anthropic.ts @@ -1,8 +1,16 @@ import * as vscode from "vscode"; import { BaseWebViewProvider } from "./base"; -import { COMMON, generativeAiModels, GROQ_CONFIG } from "../application/constant"; +import { + COMMON, + generativeAiModels, + GROQ_CONFIG, +} from "../application/constant"; import Anthropic from "@anthropic-ai/sdk"; -import { createAnthropicClient, getGenerativeAiModel, getXGroKBaseURL } from "../utils/utils"; +import { + createAnthropicClient, + getGenerativeAiModel, + getXGroKBaseURL, +} from "../utils/utils"; import { Memory } from "../memory/base"; import { IMessageInput, Message } from "../llms/message"; @@ -14,13 +22,16 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext, - protected baseUrl?: string + protected baseUrl?: string, ) { super(extensionUri, apiKey, generativeAiModel, context); this.model = createAnthropicClient(this.apiKey, this.baseUrl); } - public async sendResponse(response: string, currentChat: string): Promise { + public async sendResponse( + response: string, + currentChat: string, + ): Promise { try { const type = currentChat === "bot" ? "bot-response" : "user-input"; if (currentChat === "bot") { @@ -28,20 +39,25 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { Message.of({ role: "assistant", content: response, - }) + }), ); } else { this.chatHistory.push( Message.of({ role: "user", content: response, - }) + }), ); } if (this.chatHistory.length === 2) { - const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? 
Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : []; - Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]); + const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) + ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) + : []; + Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [ + ...chatHistory, + ...this.chatHistory, + ]); } return await this.currentWebView?.webview.postMessage({ type, @@ -52,7 +68,10 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { } } - async generateResponse(message: string, metaData?: any): Promise { + async generateResponse( + message: string, + metaData?: any, + ): Promise { try { let context: string | undefined; if (metaData?.context.length > 0) { @@ -62,7 +81,10 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { if (getGenerativeAiModel() === generativeAiModels.GROK) { this.baseUrl = getXGroKBaseURL(); } - const userMessage = Message.of({ role: "user", content: `${message} \n context: ${context}` }); + const userMessage = Message.of({ + role: "user", + content: `${message} \n context: ${context}`, + }); let chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : [userMessage]; @@ -82,7 +104,9 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider { } catch (error) { console.error(error); Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, []); - vscode.window.showErrorMessage("Model not responding, please resend your question"); + vscode.window.showErrorMessage( + "Model not responding, please resend your question", + ); } } diff --git a/src/webview-providers/base.ts b/src/webview-providers/base.ts index dca79a1..8ee8313 100644 --- a/src/webview-providers/base.ts +++ b/src/webview-providers/base.ts @@ -1,6 +1,9 @@ import * as vscode from "vscode"; import { Orchestrator } from "../agents/orchestrator"; -import { FolderEntry, IContextInfo } from "../application/interfaces/workspace.interface"; +import { + FolderEntry, + IContextInfo, +} from "../application/interfaces/workspace.interface"; import { IEventPayload } from "../emitter/interface"; import { Logger } from "../infrastructure/logger/logger"; import { AgentService } from "../services/agent-state"; @@ -28,7 +31,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { private readonly _extensionUri: vscode.Uri, protected readonly apiKey: string, protected readonly generativeAiModel: string, - context: vscode.ExtensionContext + context: vscode.ExtensionContext, ) { this.fileManager = FileManager.initialize(context, "files"); this.fileService = FileService.getInstance(); @@ -46,10 +49,16 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { this.orchestrator.onThinking(this.handleModelResponseEvent.bind(this)), this.orchestrator.onUpdate(this.handleModelResponseEvent.bind(this)), this.orchestrator.onError(this.handleModelResponseEvent.bind(this)), - this.orchestrator.onSecretChange(this.handleModelResponseEvent.bind(this)), - this.orchestrator.onActiveworkspaceUpdate(this.handleGenericEvents.bind(this)), + this.orchestrator.onSecretChange( + this.handleModelResponseEvent.bind(this), + ), + this.orchestrator.onActiveworkspaceUpdate( + this.handleGenericEvents.bind(this), + ), this.orchestrator.onFileUpload(this.handleModelResponseEvent.bind(this)), - this.orchestrator.onStrategizing(this.handleModelResponseEvent.bind(this)) + this.orchestrator.onStrategizing( + this.handleModelResponseEvent.bind(this), + ), ); } @@ -69,7 +78,9 @@ export abstract class 
BaseWebViewProvider implements vscode.Disposable { webviewView.webview.options = webviewOptions; if (!this.apiKey) { - vscode.window.showErrorMessage("API key not configured. Check your settings."); + vscode.window.showErrorMessage( + "API key not configured. Check your settings.", + ); return; } @@ -82,7 +93,10 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { } private async setWebviewHtml(view: vscode.WebviewView): Promise { - view.webview.html = getWebviewContent(this.currentWebView?.webview!, this._extensionUri); + view.webview.html = getWebviewContent( + this.currentWebView?.webview!, + this._extensionUri, + ); } private async getFiles() { @@ -97,8 +111,10 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { private async publishWorkSpace(): Promise { try { - const filesAndDirs: IContextInfo = await this.workspaceService.getContextInfo(true); - const workspaceFiles: Map | undefined = filesAndDirs.workspaceFiles; + const filesAndDirs: IContextInfo = + await this.workspaceService.getContextInfo(true); + const workspaceFiles: Map | undefined = + filesAndDirs.workspaceFiles; if (!workspaceFiles) { this.logger.warn("There no files within the workspace"); return; @@ -120,7 +136,10 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { let response: any; switch (message.command) { case "user-input": - response = await this.generateResponse(message.message, message.metaData); + response = await this.generateResponse( + message.message, + message.metaData, + ); if (response) { await this.sendResponse(formatText(response), "bot"); } @@ -141,7 +160,7 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { default: throw new Error("Unknown command"); } - }) + }), ); } catch (error) { this.logger.error("Message handler failed", error); @@ -157,11 +176,20 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { } public handleModelResponseEvent(event: IEventPayload) { - this.sendResponse(formatText(event.message), event.message === "folders" ? "bootstrap" : "bot"); + this.sendResponse( + formatText(event.message), + event.message === "folders" ? 
"bootstrap" : "bot", + ); } - abstract generateResponse(message?: string, metaData?: Record): Promise; + abstract generateResponse( + message?: string, + metaData?: Record, + ): Promise; - abstract sendResponse(response: string, currentChat?: string): Promise; + abstract sendResponse( + response: string, + currentChat?: string, + ): Promise; public dispose(): void { this.disposables.forEach((d) => d.dispose()); @@ -169,7 +197,8 @@ export abstract class BaseWebViewProvider implements vscode.Disposable { async getContext(files: string[]) { try { - const filesContent: Map | undefined = await this.fileService.getFilesContent(files); + const filesContent: Map | undefined = + await this.fileService.getFilesContent(files); if (filesContent && filesContent.size > 0) { return Array.from(filesContent.values()).join("\n"); } diff --git a/src/webview-providers/groq.ts b/src/webview-providers/groq.ts index a48aa09..5eb3935 100644 --- a/src/webview-providers/groq.ts +++ b/src/webview-providers/groq.ts @@ -9,7 +9,12 @@ export class GroqWebViewProvider extends BaseWebViewProvider { chatHistory: IMessageInput[] = []; readonly model: Groq; private static instance: GroqWebViewProvider; - constructor(extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext) { + constructor( + extensionUri: vscode.Uri, + apiKey: string, + generativeAiModel: string, + context: vscode.ExtensionContext, + ) { super(extensionUri, apiKey, generativeAiModel, context); this.model = new Groq({ apiKey: this.apiKey, @@ -21,14 +26,22 @@ export class GroqWebViewProvider extends BaseWebViewProvider { extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, - context: vscode.ExtensionContext + context: vscode.ExtensionContext, ) { if (!GroqWebViewProvider.instance) { - GroqWebViewProvider.instance = new GroqWebViewProvider(extensionUri, apiKey, generativeAiModel, context); + GroqWebViewProvider.instance = new GroqWebViewProvider( + extensionUri, + apiKey, + generativeAiModel, + context, + ); } } - public async sendResponse(response: string, participant: string): Promise { + public async sendResponse( + response: string, + participant: string, + ): Promise { try { const type = participant === "bot" ? "bot-response" : "user-input"; if (participant === "bot") { @@ -36,19 +49,24 @@ export class GroqWebViewProvider extends BaseWebViewProvider { Message.of({ role: "system", content: response, - }) + }), ); } else { this.chatHistory.push( Message.of({ role: "user", content: response, - }) + }), ); } if (this.chatHistory.length === 2) { - const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : []; - Memory.set(COMMON.GROQ_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]); + const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) + ? Memory.get(COMMON.GROQ_CHAT_HISTORY) + : []; + Memory.set(COMMON.GROQ_CHAT_HISTORY, [ + ...chatHistory, + ...this.chatHistory, + ]); } // Once the agent task is done, map the memory into the llm brain. // Send the final answer to the webview here. 
@@ -61,15 +79,23 @@ export class GroqWebViewProvider extends BaseWebViewProvider { } } - async generateResponse(message: string, metaData?: any): Promise { + async generateResponse( + message: string, + metaData?: any, + ): Promise { try { let context: string | undefined; if (metaData?.context.length > 0) { context = await this.getContext(metaData.context); } const { temperature, max_tokens, top_p, stop } = GROQ_CONFIG; - const userMessage = Message.of({ role: "user", content: `${message} \n context: ${context}` }); - let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [userMessage]; + const userMessage = Message.of({ + role: "user", + content: `${message} \n context: ${context}`, + }); + let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) + ? Memory.get(COMMON.GROQ_CHAT_HISTORY) + : [userMessage]; chatHistory = [...chatHistory, userMessage]; @@ -88,7 +114,9 @@ export class GroqWebViewProvider extends BaseWebViewProvider { } catch (error) { console.error(error); Memory.set(COMMON.GROQ_CHAT_HISTORY, []); - vscode.window.showErrorMessage("Model not responding, please resend your question"); + vscode.window.showErrorMessage( + "Model not responding, please resend your question", + ); return; } } diff --git a/src/webview-providers/manager.ts b/src/webview-providers/manager.ts index 052410f..8ce3858 100644 --- a/src/webview-providers/manager.ts +++ b/src/webview-providers/manager.ts @@ -20,7 +20,7 @@ export class WebViewProviderManager implements vscode.Disposable { extensionUri: vscode.Uri, apiKey: string, model: string, - context: vscode.ExtensionContext + context: vscode.ExtensionContext, ) => BaseWebViewProvider > = new Map(); private webviewView: vscode.WebviewView | undefined; @@ -31,17 +31,27 @@ export class WebViewProviderManager implements vscode.Disposable { static AgentId = "agentId"; // TODO This is hardcoded for now,in upcoming versions, requests will be tagged to respective agents. 
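// A minimal sketch of the registry-and-factory pattern the manager builds on
// above: provider constructors are keyed by model name and instantiated on
// demand. Types are simplified for illustration and assume the imports
// manager.ts already declares (vscode, BaseWebViewProvider, GroqWebViewProvider,
// generativeAiModels).
type ProviderCtor = new (
  extensionUri: vscode.Uri,
  apiKey: string,
  model: string,
  context: vscode.ExtensionContext,
) => BaseWebViewProvider;

const registry = new Map<string, ProviderCtor>();
registry.set(generativeAiModels.GROQ, GroqWebViewProvider);

function createProviderFor(
  modelName: string,
  apiKey: string,
  model: string,
  context: vscode.ExtensionContext,
): BaseWebViewProvider | undefined {
  const Ctor = registry.get(modelName);
  return Ctor ? new Ctor(context.extensionUri, apiKey, model, context) : undefined;
}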
private readonly logger = new Logger(WebViewProviderManager.name); - private constructor(private readonly extensionContext: vscode.ExtensionContext) { + private constructor( + private readonly extensionContext: vscode.ExtensionContext, + ) { this.orchestrator = Orchestrator.getInstance(); this.agentService = AgentService.getInstance(); this.registerProviders(); - this.disposables.push(this.orchestrator.onModelChange(this.handleModelChange.bind(this))); - this.disposables.push(this.orchestrator.onHistoryUpdated(this.handleHistoryUpdate.bind(this))); + this.disposables.push( + this.orchestrator.onModelChange(this.handleModelChange.bind(this)), + ); + this.disposables.push( + this.orchestrator.onHistoryUpdated(this.handleHistoryUpdate.bind(this)), + ); } - public static getInstance(extensionContext: vscode.ExtensionContext): WebViewProviderManager { + public static getInstance( + extensionContext: vscode.ExtensionContext, + ): WebViewProviderManager { if (!WebViewProviderManager.instance) { - WebViewProviderManager.instance = new WebViewProviderManager(extensionContext); + WebViewProviderManager.instance = new WebViewProviderManager( + extensionContext, + ); } return WebViewProviderManager.instance; } @@ -49,9 +59,18 @@ export class WebViewProviderManager implements vscode.Disposable { private registerProviders(): void { this.providerRegistry.set(generativeAiModels.GEMINI, GeminiWebViewProvider); this.providerRegistry.set(generativeAiModels.GROQ, GroqWebViewProvider); - this.providerRegistry.set(generativeAiModels.ANTHROPIC, AnthropicWebViewProvider); - this.providerRegistry.set(generativeAiModels.GROK, AnthropicWebViewProvider); - this.providerRegistry.set(generativeAiModels.DEEPSEEK, DeepseekWebViewProvider); + this.providerRegistry.set( + generativeAiModels.ANTHROPIC, + AnthropicWebViewProvider, + ); + this.providerRegistry.set( + generativeAiModels.GROK, + AnthropicWebViewProvider, + ); + this.providerRegistry.set( + generativeAiModels.DEEPSEEK, + DeepseekWebViewProvider, + ); } registerWebViewProvider(): vscode.Disposable | undefined { @@ -68,23 +87,37 @@ export class WebViewProviderManager implements vscode.Disposable { const disposable = vscode.window.registerWebviewViewProvider( BaseWebViewProvider.viewId, this.webviewViewProvider, - { webviewOptions: { retainContextWhenHidden: true } } + { webviewOptions: { retainContextWhenHidden: true } }, ); this.disposables.push(disposable); return disposable; } } - private createProvider(modelName: string, apiKey: string, model: string): BaseWebViewProvider | undefined { + private createProvider( + modelName: string, + apiKey: string, + model: string, + ): BaseWebViewProvider | undefined { const providerClass = this.providerRegistry.get(modelName); if (!providerClass) { this.logger.warn(`Provider for model type ${modelName} not found`); return; } - return new providerClass(this.extensionContext.extensionUri, apiKey, model, this.extensionContext); + return new providerClass( + this.extensionContext.extensionUri, + apiKey, + model, + this.extensionContext, + ); } - private async switchProvider(modelName: string, apiKey: string, model: string, onload: boolean): Promise { + private async switchProvider( + modelName: string, + apiKey: string, + model: string, + onload: boolean, + ): Promise { try { const newProvider = this.createProvider(modelName, apiKey, model); if (!newProvider) { @@ -111,7 +144,7 @@ export class WebViewProviderManager implements vscode.Disposable { JSON.stringify({ success: true, modelName, - }) + }), ); } catch (error: any) { 
this.logger.error(`Error switching provider: ${error}`); @@ -120,13 +153,18 @@ export class WebViewProviderManager implements vscode.Disposable { JSON.stringify({ success: false, modelName, - }) + }), ); throw new Error(error); } } - async initializeProvider(modelName: string, apiKey: string, model: string, onload: boolean): Promise { + async initializeProvider( + modelName: string, + apiKey: string, + model: string, + onload: boolean, + ): Promise { await this.switchProvider(modelName, apiKey, model, onload); } @@ -154,7 +192,9 @@ export class WebViewProviderManager implements vscode.Disposable { } private async getCurrentHistory(): Promise { - const history = await this.agentService.getChatHistory(WebViewProviderManager.AgentId); + const history = await this.agentService.getChatHistory( + WebViewProviderManager.AgentId, + ); return history; } @@ -170,7 +210,10 @@ export class WebViewProviderManager implements vscode.Disposable { } async setCurrentHistory(data: any[]): Promise { - await this.agentService.saveChatHistory(WebViewProviderManager.AgentId, data); + await this.agentService.saveChatHistory( + WebViewProviderManager.AgentId, + data, + ); } async handleHistoryUpdate({ type, message }: IEventPayload) { @@ -188,7 +231,9 @@ export class WebViewProviderManager implements vscode.Disposable { this.currentProvider.dispose(); } this.disposables.forEach((d) => d.dispose()); - this.extensionContext.subscriptions.forEach((subscription) => subscription.dispose()); + this.extensionContext.subscriptions.forEach((subscription) => + subscription.dispose(), + ); this.disposables = []; } }
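// A minimal sketch of the disposal pattern the manager and providers follow
// above: every event subscription is tracked and released once in dispose().
// The class name is illustrative.
import * as vscode from "vscode";

class SubscriptionOwner implements vscode.Disposable {
  private disposables: vscode.Disposable[] = [];

  protected track(...items: vscode.Disposable[]): void {
    this.disposables.push(...items);
  }

  dispose(): void {
    this.disposables.forEach((d) => d.dispose());
    this.disposables = [];
  }
}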