Merged
2 changes: 1 addition & 1 deletion src/application/constant.ts
@@ -28,7 +28,7 @@ export enum COMMON {
}
export const GROQ_CONFIG = {
temperature: 0.1,
max_tokens: 60000,
max_tokens: 8192,
top_p: 1,
stream: false,
stop: ["thanks"],
1 change: 0 additions & 1 deletion src/commands/handler.ts
@@ -39,7 +39,6 @@ export abstract class CodeCommandHandler implements ICodeCommandHandler {
private readonly xGrokApiKey: string;
private readonly xGrokModel: string;
private readonly logger: Logger;
// Todo Need to refactor. Only one instance of a model can be created at a time. Therefore no need to retrieve all model information, only retrieve the required model within the application
constructor(
private readonly action: string,
_context: vscode.ExtensionContext,
3 changes: 2 additions & 1 deletion src/emitter/interface.ts
@@ -13,7 +13,8 @@ type AgentEventKeys =
| "onFilesRetrieved"
| "onStrategizing"
| "onModelChange"
| "onModelChangeSuccess";
| "onModelChangeSuccess"
| "onHistoryUpdated";

export type IAgentEventMap = Record<AgentEventKeys, IEventPayload>;

2 changes: 2 additions & 0 deletions src/emitter/publisher.ts
@@ -28,6 +28,8 @@ export class EventEmitter extends BaseEmitter<Record<string, IEventPayload>> {
onModelChangeSuccess: vscode.Event<IEventPayload> = this.createEvent(
"onModelChangeSuccess",
);
onHistoryUpdated: vscode.Event<IEventPayload> =
this.createEvent("onHistoryUpdated");

/**
* Emits a generic event with specified status, message, and optional data.
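For context, the new onHistoryUpdated declaration follows VS Code's EventEmitter/Event pair, like the existing events created via createEvent. A minimal standalone sketch of that pattern is below; the HistoryPayload shape is an assumption for illustration, not the repo's IEventPayload.

import * as vscode from "vscode";

// Assumed payload shape for the sketch only.
interface HistoryPayload {
  message?: string;
}

const emitter = new vscode.EventEmitter<HistoryPayload>();
const onHistoryUpdated: vscode.Event<HistoryPayload> = emitter.event;

// A consumer subscribes and receives every update pushed through the emitter.
const subscription: vscode.Disposable = onHistoryUpdated((payload) => {
  console.log("History updated:", payload.message);
});

// The publisher fires the event when chat history changes.
emitter.fire({ message: "chat history imported" });

// Clean up when done (typically via context.subscriptions.push(subscription)).
subscription.dispose();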
4 changes: 2 additions & 2 deletions src/extension.ts
@@ -72,7 +72,7 @@ async function connectToDatabase(context: vscode.ExtensionContext) {

async function createFileDB(context: vscode.ExtensionContext) {
try {
const fileUploader = new FileManager(context, "patterns");
const fileUploader = new FileManager(context, "database");
const files = await fileUploader.getFiles();
if (!files?.find((file) => file.includes("dev.db"))) {
await fileUploader.createFile("dev.db");
@@ -96,7 +96,6 @@ export async function activate(context: vscode.ExtensionContext) {
logger.info(`Logged into GitHub as ${session?.account.label}`);
Memory.getInstance();

// TODO This is broken. Need to Fix
// const index = CodeIndexingService.createInstance();
// Get each of the folders and call the next line for each
// const result = await index.buildFunctionStructureMap();
@@ -235,6 +234,7 @@ export async function activate(context: vscode.ExtensionContext) {
selectedGenerativeAiModel,
apiKey,
apiModel,
true,
);
}
context.subscriptions.push(
4 changes: 1 addition & 3 deletions src/infrastructure/logger/logger.ts
@@ -84,9 +84,7 @@ export class Logger {
Logger.config.filePath = path.join(logDir, `codebuddy-${date}.log`);
}
}
if (!Logger.outputChannel) {
Logger.outputChannel = vscode.window.createOutputChannel("CodeBuddy");
}
Logger.outputChannel ??= vscode.window.createOutputChannel("CodeBuddy");
Logger.telemetry = telemetry;
Logger.sessionId = Logger.generateId();
Logger.setTraceId(Logger.generateId());
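The ??= rewrite is equivalent to the previous if (!Logger.outputChannel) guard here because an OutputChannel is never a defined-but-falsy value; the operator assigns only when the target is null or undefined. A minimal illustration (not taken from the PR):

// ??= assigns only when the left-hand side is null or undefined.
let channel: { name: string } | undefined;
channel ??= { name: "CodeBuddy" }; // assigns: channel was undefined
channel ??= { name: "ignored" };   // no-op: channel is already set

// Difference from an `if (!x) x = ...` guard: falsy-but-defined values are kept.
let retries: number | undefined = 0;
retries ??= 3; // stays 0; `if (!retries) retries = 3` would have overwritten it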
50 changes: 24 additions & 26 deletions src/llms/gemini/gemini.ts
@@ -61,8 +61,8 @@ export class GeminiLLM
private intializeDisposable(): void {
this.disposables.push(
vscode.workspace.onDidChangeConfiguration(() =>
this.handleConfigurationChange()
)
this.handleConfigurationChange(),
),
);
}

@@ -72,9 +72,7 @@
}

static getInstance(config: ILlmConfig) {
if (!GeminiLLM.instance) {
GeminiLLM.instance = new GeminiLLM(config);
}
GeminiLLM.instance ??= new GeminiLLM(config);
return GeminiLLM.instance;
}

@@ -92,7 +90,7 @@

public async generateText(
prompt: string,
instruction?: string
instruction?: string,
): Promise<string> {
try {
const model = this.getModel();
@@ -142,15 +140,15 @@ export class GeminiLLM
}

async generateContentWithTools(
userInput: string
userInput: string,
): Promise<GenerateContentResult> {
try {
await this.buildChatHistory(
userInput,
undefined,
undefined,
undefined,
true
true,
);
const prompt = createPrompt(userInput);
const contents = Memory.get(COMMON.GEMINI_CHAT_HISTORY) as Content[];
@@ -185,7 +183,7 @@
*/
private async processToolCalls(
toolCalls: FunctionCall[],
userInput: string
userInput: string,
): Promise<any> {
let finalResult: string | undefined = undefined;
try {
@@ -221,7 +219,7 @@
functionCall.name,
functionResult,
undefined,
false
false,
);

const snapShot = this.createSnapShot({
@@ -239,12 +237,12 @@
const retry = await vscode.window.showErrorMessage(
`Function call failed: ${error.message}. Retry or abort?`,
"Retry",
"Abort"
"Abort",
);

if (retry === "Retry") {
finalResult = await this.fallBackToGroq(
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`,
);
} else {
finalResult = `Function call error: ${error.message}. Falling back to last response.`;
@@ -256,13 +254,13 @@
} catch (error) {
console.error("Error processing tool calls", error);
finalResult = await this.fallBackToGroq(
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`,
);
}
}

async processUserQuery(
userInput: string
userInput: string,
): Promise<string | GenerateContentResult | undefined> {
let finalResult: string | GenerateContentResult | undefined;
let userQuery = userInput;
@@ -287,8 +285,8 @@
const timeoutPromise = new Promise((_, reject) =>
setTimeout(
() => reject(new Error("TImeout Exceeded")),
this.timeOutMs
)
this.timeOutMs,
),
);
const responsePromise = await this.generateContentWithTools(userQuery);
const result = (await Promise.race([
@@ -321,15 +319,15 @@
finalResult = await this.groqLLM.generateText(userInput);
if (finalResult) {
finalResult = await this.fallBackToGroq(
`User Input: ${this.userQuery} \n Plans: ${userInput} Write production ready code to demonstrate your solution`
`User Input: ${this.userQuery} \n Plans: ${userInput} Write production ready code to demonstrate your solution`,
);
return finalResult;
}
}
this.lastFunctionCalls.add(currentCallSignatures);
if (this.lastFunctionCalls.size > 10) {
this.lastFunctionCalls = new Set(
[...this.lastFunctionCalls].slice(-10)
[...this.lastFunctionCalls].slice(-10),
);
}
if (toolCalls && toolCalls.length > 0) {
@@ -366,7 +364,7 @@
if (snapshot?.length > 0) {
Memory.removeItems(
COMMON.GEMINI_SNAPSHOT,
Memory.get(COMMON.GEMINI_SNAPSHOT).length
Memory.get(COMMON.GEMINI_SNAPSHOT).length,
);
}
this.orchestrator.publish("onQuery", String(finalResult));
@@ -378,15 +376,15 @@
// );
console.log("Error processing user query", error);
finalResult = await this.fallBackToGroq(
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`
`User Input: ${this.userQuery} \n Plans: ${userInput}Write production ready code to demonstrate your solution`,
);
console.log("Model not responding at this time, please try again", error);
}
}

private async handleSingleFunctionCall(
functionCall: FunctionCall,
attempt: number = 0
attempt: number = 0,
): Promise<any> {
const MAX_RETRIES = 3;
const args = functionCall.args as Record<string, any>;
@@ -408,7 +406,7 @@
if (attempt < MAX_RETRIES) {
console.warn(
`Retry attempt ${attempt + 1} for function ${name}`,
JSON.stringify({ error, args })
JSON.stringify({ error, args }),
);
return this.handleSingleFunctionCall(functionCall, attempt + 1);
}
@@ -431,7 +429,7 @@
functionCall?: any,
functionResponse?: any,
chat?: ChatSession,
isInitialQuery: boolean = false
isInitialQuery: boolean = false,
): Promise<Content[]> {
// Check if it makes sense to separate Agent and Edit Mode memory when switching.
let chatHistory: any = Memory.get(COMMON.GEMINI_CHAT_HISTORY) || [];
@@ -457,17 +455,17 @@
Message.of({
role: "model",
parts: [{ functionCall }],
})
}),
);

const observationResult = await chat.sendMessage(
`Tool result: ${JSON.stringify(functionResponse)}`
`Tool result: ${JSON.stringify(functionResponse)}`,
);
chatHistory.push(
Message.of({
role: "user",
parts: [{ text: observationResult.response.text() }],
})
}),
);
}
if (chatHistory.length > 50) chatHistory = chatHistory.slice(-50);
2 changes: 1 addition & 1 deletion src/services/agent-state.ts
@@ -5,7 +5,7 @@ import { FileStorage, IStorage } from "./file-storage";

export class AgentService {
private static instance: AgentService;
private storage: IStorage;
private readonly storage: IStorage;

private constructor(storage: IStorage) {
this.storage = storage;
4 changes: 2 additions & 2 deletions src/services/file-storage.ts
@@ -12,11 +12,11 @@ export interface IStorage {
}

export class FileStorage implements IStorage {
private storagePath: string;
private readonly storagePath: string;

constructor() {
this.storagePath = path.join(
vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || "",
vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? "",
".codebuddy",
);
if (!fs.existsSync(this.storagePath)) {
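Switching the workspace-path fallback from || to ?? narrows the fallback to null/undefined only: an empty fsPath string is now kept rather than replaced. A small illustration of the difference (the values below are hypothetical):

const missing: string | undefined = undefined;
console.log(missing ?? ".codebuddy"); // ".codebuddy" — nullish fallback
console.log(missing || ".codebuddy"); // ".codebuddy" — same result here

const empty = "";
console.log(empty ?? ".codebuddy"); // ""           — ?? keeps the empty string
console.log(empty || ".codebuddy"); // ".codebuddy" — || treats "" as falsy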
6 changes: 3 additions & 3 deletions src/services/url-reranker.ts
@@ -66,7 +66,7 @@ export class UrlReranker {
if (!title) return 0;

const matches = UrlReranker.KEYWORDS.filter((keyword) =>
title.toLowerCase().includes(keyword.toLowerCase())
title.toLowerCase().includes(keyword.toLowerCase()),
);

return matches.length / UrlReranker.KEYWORDS.length;
@@ -97,7 +97,7 @@

const codeBlockCount = this.countCodeBlocks(metadata.content);
const hasAdequateExplanation = this.hasAdequateExplanation(
metadata.content
metadata.content,
);

return codeBlockCount + (hasAdequateExplanation ? 1 : 0);
@@ -130,7 +130,7 @@
*/
calculateFinalScore(metadata: IPageMetada): number {
const titleRelevanceScore = this.calculateTitleRelevanceScore(
metadata.title ?? ""
metadata.title ?? "",
);
const reputationScore = this.calculateSourceReputationScore(metadata);
const contentQualityScore = this.calculateContentQualityScore(metadata);
14 changes: 12 additions & 2 deletions src/webview-providers/anthropic.ts
@@ -68,13 +68,23 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider {
}
}

async generateResponse(message: string): Promise<string | undefined> {
async generateResponse(
message: string,
metaData?: any,
): Promise<string | undefined> {
try {
let context: string | undefined;
if (metaData?.context.length > 0) {
context = await this.getContext(metaData.context);
}
const { max_tokens } = GROQ_CONFIG;
if (getGenerativeAiModel() === generativeAiModels.GROK) {
this.baseUrl = getXGroKBaseURL();
}
const userMessage = Message.of({ role: "user", content: message });
const userMessage = Message.of({
role: "user",
content: `${message} \n context: ${context}`,
});
let chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY)
? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY)
: [userMessage];
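For reference, a more defensive variant of the new context handling could read as the sketch below. This is illustrative only: getContext stands in for the provider's method, and the extra optional-chain guard and the "skip the context suffix when empty" behaviour are assumptions, not the PR's implementation.

// Sketch: only call getContext when a non-empty context array is present,
// and avoid interpolating "undefined" into the prompt when there is none.
async function buildUserContent(
  message: string,
  metaData?: { context?: string[] },
  getContext?: (refs: string[]) => Promise<string>,
): Promise<string> {
  let context: string | undefined;
  if (metaData?.context?.length && getContext) {
    context = await getContext(metaData.context);
  }
  return context ? `${message}\ncontext: ${context}` : message;
}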
24 changes: 7 additions & 17 deletions src/webview-providers/base.ts
@@ -136,17 +136,10 @@ export abstract class BaseWebViewProvider implements vscode.Disposable {
let response: any;
switch (message.command) {
case "user-input":
if (message.metaData.mode === "Agent") {
response = await this.generateResponse(
message.message,
message.metaData,
);
} else {
response = await this.generateResponse(
message.message,
message.metaData,
);
}
response = await this.generateResponse(
message.message,
message.metaData,
);
if (response) {
await this.sendResponse(formatText(response), "bot");
}
Expand All @@ -161,12 +154,9 @@ export abstract class BaseWebViewProvider implements vscode.Disposable {
await this.orchestrator.publish("onModelChange", message);
break;
// Publish an event instead to prevent cyclic dependency
// case "chat-history-import":
// await this.agentService.saveChatHistory(
// WebViewProviderManager.AgentId,
// JSON.parse(message.message),
// );
// break;
case "messages-updated":
this.orchestrator.publish("onHistoryUpdated", message);
break;
default:
throw new Error("Unknown command");
}