[Feature]: Adding configurable llmservice endpoint which assumes backend stores the system prompt #392

Open · wants to merge 4 commits into base: dev
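
For context: this PR adds a new 'llmservice' AI provider whose system prompt lives on the backend, so opencommit only sends the git diff. A minimal sketch of how it would be enabled, using the existing `oco config set` flow and the default values introduced in the diff below (the endpoint and path values are placeholders for whatever backend you run):

oco config set OCO_AI_PROVIDER=llmservice
oco config set OCO_BACKEND_ENDPOINT=localhost:8000
oco config set OCO_BACKEND_PATH=api/generate

The same keys can also be supplied as environment variables or via the '.env' / '~/.opencommit' config files referenced further down in the diff.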
example.txt (empty file added)
src/commands/config.ts (45 changes: 37 additions & 8 deletions)
@@ -34,6 +34,8 @@ export enum CONFIG_KEYS {
   OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
   OCO_API_URL = 'OCO_API_URL',
   OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
+  OCO_BACKEND_ENDPOINT = 'OCO_BACKEND_ENDPOINT',
+  OCO_BACKEND_PATH = 'OCO_BACKEND_PATH',
   OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
   OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
 }
@@ -45,6 +47,7 @@ export enum CONFIG_MODES {

 export const MODEL_LIST = {
   openai: [
+    'gpt-4o-mini',
     'gpt-4o-mini',
     'gpt-3.5-turbo',
     'gpt-3.5-turbo-instruct',
@@ -132,8 +135,9 @@ export const configValidators = {
       config.OCO_ANTHROPIC_API_KEY ||
       config.OCO_AI_PROVIDER.startsWith('ollama') ||
       config.OCO_AZURE_API_KEY ||
-      config.OCO_AI_PROVIDER == 'test' ||
-      config.OCO_AI_PROVIDER == 'flowise',
+      config.OCO_AI_PROVIDER == 'flowise' ||
+      config.OCO_AI_PROVIDER == 'llmservice' ||
+      config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic/Azure or other provider API key via `oco config set OCO_OPENAI_API_KEY=your_key`, for help refer to docs https://github.com/di-sukharev/opencommit'
     );
     validateConfig(
@@ -152,8 +156,9 @@ export const configValidators = {
       config.OCO_OPENAI_API_KEY ||
       config.OCO_AZURE_API_KEY ||
       config.OCO_AI_PROVIDER == 'ollama' ||
-      config.OCO_AI_PROVIDER == 'test' ||
-      config.OCO_AI_PROVIDER == 'flowise',
+      config.OCO_AI_PROVIDER == 'llmservice' ||
+      config.OCO_AI_PROVIDER == 'flowise' ||
+      config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic/Azure API key'
     );

@@ -179,8 +184,9 @@ export const configValidators = {
       value ||
       config.OCO_OPENAI_API_KEY ||
       config.OCO_AI_PROVIDER == 'ollama' ||
-      config.OCO_AI_PROVIDER == 'test' ||
-      config.OCO_AI_PROVIDER == 'flowise',
+      config.OCO_AI_PROVIDER == 'llmservice' ||
+      config.OCO_AI_PROVIDER == 'flowise' ||
+      config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic API key'
     );

@@ -323,9 +329,10 @@ export const configValidators = {
         'gemini',
         'azure',
         'test',
-        'flowise'
+        'flowise',
+        'llmservice'
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise', 'llmservice' or 'openai' (default)`
     );
     return value;
   },
@@ -379,7 +386,26 @@ export const configValidators = {
       `${value} is not a valid URL`
     );
     return value;
   },
+
+  [CONFIG_KEYS.OCO_BACKEND_ENDPOINT](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_BACKEND_ENDPOINT,
+      typeof value === 'string',
+      'Must be string'
+    );
+    return value;
+  },
+
+  [CONFIG_KEYS.OCO_BACKEND_PATH](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_BACKEND_PATH,
+      typeof value === 'string',
+      'Must be string'
+    );
+    return value;
+  }
+
 };

 export type ConfigType = {
@@ -423,6 +449,8 @@ export const getConfig = ({
       process.env.OCO_ONE_LINE_COMMIT === 'true' ? true : false,
     OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || undefined,
     OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || 'commit-message',
+    OCO_BACKEND_ENDPOINT: process.env.OCO_BACKEND_ENDPOINT || 'localhost:8000',
+    OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || 'api/generate',
     OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ':',
     OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || undefined,
     OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined
@@ -452,6 +480,7 @@ export const getConfig = ({
       `Manually fix the '.env' file or global '~/.opencommit' config file.`
     );

+
     process.exit(1);
   }
 }
src/engine/llmservice.ts (37 changes: 37 additions & 0 deletions)
@@ -0,0 +1,37 @@
+import axios, { AxiosError } from 'axios';
+import { ChatCompletionRequestMessage } from 'openai';
+import { AiEngine } from './Engine';
+import {
+  getConfig
+} from '../commands/config';
+
+const config = getConfig();
+
+
+export class LlmService implements AiEngine {
+
+  async generateCommitMessage(
+    messages: Array<ChatCompletionRequestMessage>
+  ): Promise<string | undefined> {
+
+    const gitDiff = messages[ messages.length - 1 ]?.content;
+    const url = `http://${config?.OCO_BACKEND_ENDPOINT}/${config?.OCO_BACKEND_PATH}`;
+    const payload = {
+      user_prompt: gitDiff
+    }
+
+    try {
+      const response = await axios.post(url, payload, {
+        headers: {
+          'Content-Type': 'application/json'
+        }
+      });
+      const message = response.data;
+
+      return message;
+    } catch (err: any) {
+      const message = err.response?.data?.error ?? err.message;
+      throw new Error('local model issues. details: ' + message);
+    }
+  }
+}
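
For anyone wiring up the backend side: the engine above POSTs JSON of the shape { "user_prompt": "<git diff>" } to http://<OCO_BACKEND_ENDPOINT>/<OCO_BACKEND_PATH> and returns response.data verbatim as the commit message, so the endpoint should reply with plain text. A rough sketch of a compatible handler, written with Express purely as an illustration (Express is not part of this PR, and the model call is a placeholder):

import express from 'express';

const app = express();
app.use(express.json());

// Matches the defaults OCO_BACKEND_ENDPOINT=localhost:8000 and OCO_BACKEND_PATH=api/generate.
// The system prompt is assumed to live here on the server, not in opencommit.
app.post('/api/generate', (req, res) => {
  const gitDiff: string = req.body.user_prompt ?? '';
  // Placeholder: call whatever model you host, prepending the server-side system prompt.
  const commitMessage = `chore: update (${gitDiff.length} changed characters)`;
  // Reply with plain text so response.data is a string on the opencommit side.
  res.type('text/plain').send(commitMessage);
});

app.listen(8000);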
src/utils/engine.ts (3 changes: 3 additions & 0 deletions)
@@ -6,6 +6,7 @@ import { OllamaAi } from '../engine/ollama';
 import { AnthropicAi } from '../engine/anthropic'
 import { TestAi } from '../engine/testAi';
 import { Azure } from '../engine/azure';
+import { LlmService } from '../engine/llmservice';
 import { FlowiseAi } from '../engine/flowise'

 export function getEngine(): AiEngine {
@@ -28,6 +29,8 @@ export function getEngine(): AiEngine {
     return new Gemini();
   } else if (provider == 'azure') {
     return new Azure();
+  } else if(provider == 'llmservice'){
+    return new LlmService();
   } else if( provider == 'flowise'){
     return new FlowiseAi();
   }
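
Once registered here, the new provider flows through the same AiEngine call path as the other engines. Roughly, as an illustrative snippet only (the real call site in opencommit builds the message list from the staged diff):

import { ChatCompletionRequestMessage } from 'openai';
import { getEngine } from './utils/engine';

// Illustrative message list; opencommit supplies the actual git diff as the last entry.
const messages: Array<ChatCompletionRequestMessage> = [
  { role: 'user', content: 'diff --git a/example.txt b/example.txt ...' }
];

(async () => {
  const engine = getEngine(); // returns LlmService when OCO_AI_PROVIDER=llmservice
  console.log(await engine.generateCommitMessage(messages));
})();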