[Feature]: Adding configurable llmservice endpoint which assumes backend stores the system prompt #392

Open: wants to merge 4 commits into base: dev

Changes from 1 commit
merging feature with dev
JMN09 committed Aug 19, 2024
commit 8f1741f0c5e4bc7d90706701b1ec2d009b69fd69
out/cli.cjs (68 changes: 30 additions & 38 deletions)
@@ -30730,8 +30730,8 @@ var configValidators = {
 return value;
 validateConfig(
 "OpenAI API_KEY",
-value || config12.OCO_ANTHROPIC_API_KEY || config12.OCO_AI_PROVIDER.startsWith("ollama") || config12.OCO_AZURE_API_KEY || config12.OCO_AI_PROVIDER == "llmservice" || config12.OCO_AI_PROVIDER == "test",
-"You need to provide an OpenAI/Anthropic/Azure API key"
+value || config12.OCO_ANTHROPIC_API_KEY || config12.OCO_AI_PROVIDER.startsWith("ollama") || config12.OCO_AZURE_API_KEY || config12.OCO_AI_PROVIDER == "test" || config12.OCO_AI_PROVIDER == "flowise",
+"You need to provide an OpenAI/Anthropic/Azure or other provider API key via `oco config set OCO_OPENAI_API_KEY=your_key`, for help refer to docs https://github.com/di-sukharev/opencommit"
 );
 validateConfig(
 "OCO_OPENAI_API_KEY" /* OCO_OPENAI_API_KEY */,
@@ -30743,7 +30743,7 @@ var configValidators = {
 ["OCO_AZURE_API_KEY" /* OCO_AZURE_API_KEY */](value, config12 = {}) {
 validateConfig(
 "ANTHROPIC_API_KEY",
-value || config12.OCO_OPENAI_API_KEY || config12.OCO_AZURE_API_KEY || config12.OCO_AI_PROVIDER == "ollama" || config12.OCO_AI_PROVIDER == "llmservice" || config12.OCO_AI_PROVIDER == "test",
+value || config12.OCO_OPENAI_API_KEY || config12.OCO_AZURE_API_KEY || config12.OCO_AI_PROVIDER == "ollama" || config12.OCO_AI_PROVIDER == "test" || config12.OCO_AI_PROVIDER == "flowise",
 "You need to provide an OpenAI/Anthropic/Azure API key"
 );
 return value;
@@ -30761,7 +30761,7 @@ var configValidators = {
 ["OCO_ANTHROPIC_API_KEY" /* OCO_ANTHROPIC_API_KEY */](value, config12 = {}) {
 validateConfig(
 "ANTHROPIC_API_KEY",
-value || config12.OCO_OPENAI_API_KEY || config12.OCO_AI_PROVIDER == "ollama" || config12.OCO_AI_PROVIDER == "llmservice" || config12.OCO_AI_PROVIDER == "test",
+value || config12.OCO_OPENAI_API_KEY || config12.OCO_AI_PROVIDER == "ollama" || config12.OCO_AI_PROVIDER == "test" || config12.OCO_AI_PROVIDER == "flowise",
 "You need to provide an OpenAI/Anthropic API key"
 );
 return value;
@@ -30841,11 +30841,7 @@ var configValidators = {
 ["OCO_MODEL" /* OCO_MODEL */](value, config12 = {}) {
 validateConfig(
 "OCO_MODEL" /* OCO_MODEL */,
-[
-...MODEL_LIST.openai,
-...MODEL_LIST.anthropic,
-...MODEL_LIST.gemini
-].includes(value) || config12.OCO_AI_PROVIDER == "ollama" || config12.OCO_AI_PROVIDER == "azure" || config12.OCO_AI_PROVIDER == "llmservice" || config12.OCO_AI_PROVIDER == "test",
+typeof value === "string",
 `${value} is not supported yet, use:
 
 ${[
@@ -30883,8 +30879,16 @@ var configValidators = {
 ["OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */](value) {
 validateConfig(
 "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-["", "openai", "anthropic", "gemini", "azure", "llmservice", "test"].includes(value) || value.startsWith("ollama"),
-`${value} is not supported yet, use 'ollama', 'llmservice', 'anthropic', 'azure', 'gemini', or 'openai' (default)`
+[
+"",
+"openai",
+"anthropic",
+"gemini",
+"azure",
+"test",
+"flowise"
+].includes(value) || value.startsWith("ollama"),
+`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
 );
 return value;
 },
@@ -30929,22 +30933,6 @@ var configValidators = {
 `${value} is not a valid URL`
 );
 return value;
-},
-["OCO_BACKEND_ENDPOINT" /* OCO_BACKEND_ENDPOINT */](value) {
-validateConfig(
-"OCO_BACKEND_ENDPOINT" /* OCO_BACKEND_ENDPOINT */,
-typeof value === "string",
-"Must be string"
-);
-return value;
-},
-["OCO_BACKEND_PATH" /* OCO_BACKEND_PATH */](value) {
-validateConfig(
-"OCO_BACKEND_PATH" /* OCO_BACKEND_PATH */,
-typeof value === "string",
-"Must be string"
-);
-return value;
 }
 };
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -30974,8 +30962,9 @@ var getConfig = ({
 OCO_ONE_LINE_COMMIT: process.env.OCO_ONE_LINE_COMMIT === "true" ? true : false,
 OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || void 0,
 OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || "commit-message",
-OCO_BACKEND_ENDPOINT: process.env.OCO_BACKEND_ENDPOINT || "localhost:8000",
-OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || "api/generate"
+OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ":",
+OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || void 0,
+OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || void 0
 };
 const configExists = (0, import_fs.existsSync)(configPath);
 if (!configExists)
@@ -41049,15 +41038,18 @@ var Azure = class {
 };
 var azure = new Azure();
 
-// src/engine/llmservice.ts
+// src/engine/flowise.ts
 var config7 = getConfig();
-var LlmService = class {
+var FlowiseAi = class {
 async generateCommitMessage(messages) {
-const gitDiff = messages[messages.length - 1]?.content;
-const url2 = `http://${config7?.OCO_BACKEND_ENDPOINT}/${config7?.OCO_BACKEND_PATH}`;
+const gitDiff = messages[messages.length - 1]?.content?.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
+const url2 = `http://${config7?.OCO_FLOWISE_ENDPOINT}/api/v1/prediction/${config7?.OCO_FLOWISE_API_KEY}`;
 const payload = {
-system_prompt: messages[0]?.content,
-user_prompt: gitDiff
+question: gitDiff,
+overrideConfig: {
+systemMessagePrompt: messages[0]?.content
+},
+history: messages.slice(1, -1)
 };
 try {
 const response = await axios_default.post(url2, payload, {
@@ -41066,7 +41058,7 @@ var LlmService = class {
 }
 });
 const message = response.data;
-return message;
+return message?.text;
 } catch (err) {
 const message = err.response?.data?.error ?? err.message;
 throw new Error("local model issues. details: " + message);
@@ -41094,8 +41086,8 @@ function getEngine() {
 return new Gemini();
 } else if (provider4 == "azure") {
 return new Azure();
-} else if (provider4 == "llmservice") {
-return new LlmService();
+} else if (provider4 == "flowise") {
+return new FlowiseAi();
 }
 return new OpenAi();
 }
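
Taken together, the bundle changes replace the generic `LlmService` engine with a `FlowiseAi` engine that posts to a Flowise prediction endpoint. Below is a minimal standalone sketch of that request, reconstructed from the diff above; the endpoint and key values are placeholders, and note that `OCO_FLOWISE_API_KEY` is interpolated into the URL path (where Flowise expects a chatflow identifier) rather than sent as an auth header.

```ts
import axios from 'axios';

type ChatMessage = { role: string; content: string };

// Placeholder config values; the real code reads these via getConfig().
const OCO_FLOWISE_ENDPOINT = 'localhost:3000'; // host:port, "http://" is prefixed below
const OCO_FLOWISE_API_KEY = 'chatflow-id';     // becomes the prediction path segment

async function generateCommitMessage(
  messages: ChatMessage[]
): Promise<string | undefined> {
  // The last message carries the git diff; the bundle escapes characters
  // that could break a raw JSON string before embedding it.
  const gitDiff = messages[messages.length - 1]?.content
    ?.replace(/\\/g, '\\\\')
    .replace(/"/g, '\\"')
    .replace(/\n/g, '\\n')
    .replace(/\r/g, '\\r')
    .replace(/\t/g, '\\t');

  const url = `http://${OCO_FLOWISE_ENDPOINT}/api/v1/prediction/${OCO_FLOWISE_API_KEY}`;
  const payload = {
    question: gitDiff,                                             // main prompt input
    overrideConfig: { systemMessagePrompt: messages[0]?.content }, // system prompt
    history: messages.slice(1, -1)                                 // intermediate turns
  };

  const response = await axios.post(url, payload, {
    headers: { 'Content-Type': 'application/json' }
  });
  return response.data?.text; // Flowise returns the completion under `text`
}
```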
out/github-action.cjs (68 changes: 30 additions & 38 deletions)
@@ -49537,8 +49537,8 @@ var configValidators = {
 return value;
 validateConfig(
 "OpenAI API_KEY",
-value || config11.OCO_ANTHROPIC_API_KEY || config11.OCO_AI_PROVIDER.startsWith("ollama") || config11.OCO_AZURE_API_KEY || config11.OCO_AI_PROVIDER == "llmservice" || config11.OCO_AI_PROVIDER == "test",
-"You need to provide an OpenAI/Anthropic/Azure API key"
+value || config11.OCO_ANTHROPIC_API_KEY || config11.OCO_AI_PROVIDER.startsWith("ollama") || config11.OCO_AZURE_API_KEY || config11.OCO_AI_PROVIDER == "test" || config11.OCO_AI_PROVIDER == "flowise",
+"You need to provide an OpenAI/Anthropic/Azure or other provider API key via `oco config set OCO_OPENAI_API_KEY=your_key`, for help refer to docs https://github.com/di-sukharev/opencommit"
 );
 validateConfig(
 "OCO_OPENAI_API_KEY" /* OCO_OPENAI_API_KEY */,
@@ -49550,7 +49550,7 @@ var configValidators = {
 ["OCO_AZURE_API_KEY" /* OCO_AZURE_API_KEY */](value, config11 = {}) {
 validateConfig(
 "ANTHROPIC_API_KEY",
-value || config11.OCO_OPENAI_API_KEY || config11.OCO_AZURE_API_KEY || config11.OCO_AI_PROVIDER == "ollama" || config11.OCO_AI_PROVIDER == "llmservice" || config11.OCO_AI_PROVIDER == "test",
+value || config11.OCO_OPENAI_API_KEY || config11.OCO_AZURE_API_KEY || config11.OCO_AI_PROVIDER == "ollama" || config11.OCO_AI_PROVIDER == "test" || config11.OCO_AI_PROVIDER == "flowise",
 "You need to provide an OpenAI/Anthropic/Azure API key"
 );
 return value;
@@ -49568,7 +49568,7 @@ var configValidators = {
 ["OCO_ANTHROPIC_API_KEY" /* OCO_ANTHROPIC_API_KEY */](value, config11 = {}) {
 validateConfig(
 "ANTHROPIC_API_KEY",
-value || config11.OCO_OPENAI_API_KEY || config11.OCO_AI_PROVIDER == "ollama" || config11.OCO_AI_PROVIDER == "llmservice" || config11.OCO_AI_PROVIDER == "test",
+value || config11.OCO_OPENAI_API_KEY || config11.OCO_AI_PROVIDER == "ollama" || config11.OCO_AI_PROVIDER == "test" || config11.OCO_AI_PROVIDER == "flowise",
 "You need to provide an OpenAI/Anthropic API key"
 );
 return value;
@@ -49648,11 +49648,7 @@ var configValidators = {
 ["OCO_MODEL" /* OCO_MODEL */](value, config11 = {}) {
 validateConfig(
 "OCO_MODEL" /* OCO_MODEL */,
-[
-...MODEL_LIST.openai,
-...MODEL_LIST.anthropic,
-...MODEL_LIST.gemini
-].includes(value) || config11.OCO_AI_PROVIDER == "ollama" || config11.OCO_AI_PROVIDER == "azure" || config11.OCO_AI_PROVIDER == "llmservice" || config11.OCO_AI_PROVIDER == "test",
+typeof value === "string",
 `${value} is not supported yet, use:
 
 ${[
@@ -49690,8 +49686,16 @@ var configValidators = {
 ["OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */](value) {
 validateConfig(
 "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-["", "openai", "anthropic", "gemini", "azure", "llmservice", "test"].includes(value) || value.startsWith("ollama"),
-`${value} is not supported yet, use 'ollama', 'llmservice', 'anthropic', 'azure', 'gemini', or 'openai' (default)`
+[
+"",
+"openai",
+"anthropic",
+"gemini",
+"azure",
+"test",
+"flowise"
+].includes(value) || value.startsWith("ollama"),
+`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
 );
 return value;
 },
@@ -49736,22 +49740,6 @@ var configValidators = {
 `${value} is not a valid URL`
 );
 return value;
-},
-["OCO_BACKEND_ENDPOINT" /* OCO_BACKEND_ENDPOINT */](value) {
-validateConfig(
-"OCO_BACKEND_ENDPOINT" /* OCO_BACKEND_ENDPOINT */,
-typeof value === "string",
-"Must be string"
-);
-return value;
-},
-["OCO_BACKEND_PATH" /* OCO_BACKEND_PATH */](value) {
-validateConfig(
-"OCO_BACKEND_PATH" /* OCO_BACKEND_PATH */,
-typeof value === "string",
-"Must be string"
-);
-return value;
 }
 };
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -49781,8 +49769,9 @@ var getConfig = ({
 OCO_ONE_LINE_COMMIT: process.env.OCO_ONE_LINE_COMMIT === "true" ? true : false,
 OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || void 0,
 OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || "commit-message",
-OCO_BACKEND_ENDPOINT: process.env.OCO_BACKEND_ENDPOINT || "localhost:8000",
-OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || "api/generate"
+OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ":",
+OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || void 0,
+OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || void 0
 };
 const configExists = (0, import_fs.existsSync)(configPath);
 if (!configExists)
@@ -59856,15 +59845,18 @@ var Azure = class {
 };
 var azure = new Azure();
 
-// src/engine/llmservice.ts
+// src/engine/flowise.ts
 var config7 = getConfig();
-var LlmService = class {
+var FlowiseAi = class {
 async generateCommitMessage(messages) {
-const gitDiff = messages[messages.length - 1]?.content;
-const url2 = `http://${config7?.OCO_BACKEND_ENDPOINT}/${config7?.OCO_BACKEND_PATH}`;
+const gitDiff = messages[messages.length - 1]?.content?.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
+const url2 = `http://${config7?.OCO_FLOWISE_ENDPOINT}/api/v1/prediction/${config7?.OCO_FLOWISE_API_KEY}`;
 const payload = {
-system_prompt: messages[0]?.content,
-user_prompt: gitDiff
+question: gitDiff,
+overrideConfig: {
+systemMessagePrompt: messages[0]?.content
+},
+history: messages.slice(1, -1)
 };
 try {
 const response = await axios_default.post(url2, payload, {
@@ -59873,7 +59865,7 @@ var LlmService = class {
 }
 });
 const message = response.data;
-return message;
+return message?.text;
 } catch (err) {
 const message = err.response?.data?.error ?? err.message;
 throw new Error("local model issues. details: " + message);
@@ -59901,8 +59893,8 @@ function getEngine() {
 return new Gemini();
 } else if (provider4 == "azure") {
 return new Azure();
-} else if (provider4 == "llmservice") {
-return new LlmService();
+} else if (provider4 == "flowise") {
+return new FlowiseAi();
 }
 return new OpenAi();
 }
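
The github-action bundle mirrors the CLI bundle line for line. For reference, here is a hedged sketch of the `validateConfig` pattern both bundles lean on; the helper's body is inferred from its call sites, since this diff only shows the calls:

```ts
// Inferred shape: reject the value when the condition is falsy.
function validateConfig(key: string, condition: unknown, validationMessage: string): void {
  if (!condition) {
    throw new Error(`Unsupported config value for ${key}: ${validationMessage}`);
  }
}

// Example: the widened OCO_AI_PROVIDER check introduced above.
const value = 'flowise';
validateConfig(
  'OCO_AI_PROVIDER',
  ['', 'openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise'].includes(value) ||
    value.startsWith('ollama'),
  `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
);
```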
src/commands/config.ts (38 changes: 23 additions & 15 deletions)
@@ -35,7 +35,9 @@ export enum CONFIG_KEYS {
 OCO_API_URL = 'OCO_API_URL',
 OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
 OCO_BACKEND_ENDPOINT = 'OCO_BACKEND_ENDPOINT',
-OCO_BACKEND_PATH = 'OCO_BACKEND_PATH'
+OCO_BACKEND_PATH = 'OCO_BACKEND_PATH',
+OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
+OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
 }
 
 export enum CONFIG_MODES {
@@ -132,9 +134,10 @@ export const configValidators = {
 config.OCO_ANTHROPIC_API_KEY ||
 config.OCO_AI_PROVIDER.startsWith('ollama') ||
 config.OCO_AZURE_API_KEY ||
+config.OCO_AI_PROVIDER == 'flowise' ||
 config.OCO_AI_PROVIDER == 'llmservice' ||
 config.OCO_AI_PROVIDER == 'test',
-'You need to provide an OpenAI/Anthropic/Azure API key'
+'You need to provide an OpenAI/Anthropic/Azure or other provider API key via `oco config set OCO_OPENAI_API_KEY=your_key`, for help refer to docs https://github.com/di-sukharev/opencommit'
 );
 validateConfig(
 CONFIG_KEYS.OCO_OPENAI_API_KEY,
@@ -153,6 +156,7 @@ export const configValidators = {
 config.OCO_AZURE_API_KEY ||
 config.OCO_AI_PROVIDER == 'ollama' ||
 config.OCO_AI_PROVIDER == 'llmservice' ||
+config.OCO_AI_PROVIDER == 'flowise' ||
 config.OCO_AI_PROVIDER == 'test',
 'You need to provide an OpenAI/Anthropic/Azure API key'
 );
@@ -180,6 +184,7 @@ export const configValidators = {
 config.OCO_OPENAI_API_KEY ||
 config.OCO_AI_PROVIDER == 'ollama' ||
 config.OCO_AI_PROVIDER == 'llmservice' ||
+config.OCO_AI_PROVIDER == 'flowise' ||
 config.OCO_AI_PROVIDER == 'test',
 'You need to provide an OpenAI/Anthropic API key'
 );
@@ -276,15 +281,7 @@ export const configValidators = {
 [CONFIG_KEYS.OCO_MODEL](value: any, config: any = {}) {
 validateConfig(
 CONFIG_KEYS.OCO_MODEL,
-[
-...MODEL_LIST.openai,
-...MODEL_LIST.anthropic,
-...MODEL_LIST.gemini
-].includes(value) ||
-config.OCO_AI_PROVIDER == 'ollama' ||
-config.OCO_AI_PROVIDER == 'azure' ||
-config.OCO_AI_PROVIDER == 'llmservice' ||
-config.OCO_AI_PROVIDER == 'test',
+typeof value === 'string',
 `${value} is not supported yet, use:\n\n ${[
 ...MODEL_LIST.openai,
 ...MODEL_LIST.anthropic,
@@ -324,9 +321,17 @@ export const configValidators = {
 [CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
 validateConfig(
 CONFIG_KEYS.OCO_AI_PROVIDER,
-['', 'openai', 'anthropic', 'gemini', 'azure', 'llmservice', 'test'].includes(value) ||
-value.startsWith('ollama'),
-`${value} is not supported yet, use 'ollama', 'llmservice', 'anthropic', 'azure', 'gemini', or 'openai' (default)`
+[
+'',
+'openai',
+'anthropic',
+'gemini',
+'azure',
+'test',
+'flowise',
+'llmservice'
+].includes(value) || value.startsWith('ollama'),
+`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise', 'llmservice' or 'openai' (default)`
 );
 return value;
 },
@@ -444,7 +449,10 @@ export const getConfig = ({
 OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || undefined,
 OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || 'commit-message',
 OCO_BACKEND_ENDPOINT: process.env.OCO_BACKEND_ENDPOINT || 'localhost:8000',
-OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || 'api/generate'
+OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || 'api/generate',
+OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ':',
+OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || undefined,
+OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined
 };
 const configExists = existsSync(configPath);
 if (!configExists) return configFromEnv;
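
With these defaults in place, switching to the new engine should only require the provider key and the two Flowise variables, whether set in the environment or via `oco config set KEY=value` (the syntax the updated error message points to). A minimal sketch under that assumption, with placeholder values:

```ts
// Illustrative environment setup: the endpoint is host:port ("http://" is
// prefixed by the engine) and the key becomes the /api/v1/prediction/<key>
// path segment, i.e. the chatflow identifier.
process.env.OCO_AI_PROVIDER = 'flowise';
process.env.OCO_FLOWISE_ENDPOINT = 'localhost:3000'; // placeholder
process.env.OCO_FLOWISE_API_KEY = 'chatflow-id';     // placeholder
```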
src/utils/engine.ts (3 changes: 3 additions & 0 deletions)
@@ -7,6 +7,7 @@ import { AnthropicAi } from '../engine/anthropic'
 import { TestAi } from '../engine/testAi';
 import { Azure } from '../engine/azure';
 import { LlmService } from '../engine/llmservice';
+import { FlowiseAi } from '../engine/flowise'
 
 export function getEngine(): AiEngine {
 const config = getConfig();
@@ -30,6 +31,8 @@ export function getEngine(): AiEngine {
 return new Azure();
 } else if(provider == 'llmservice'){
 return new LlmService();
+} else if( provider == 'flowise'){
+return new FlowiseAi();
 }
 
 //open ai gpt by default
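
Note that the TypeScript source keeps both providers selectable (`llmservice` and `flowise`), while the rebuilt bundles retain only `flowise`. A condensed sketch of the dispatch after this change, with the other provider branches elided and the unshown import paths assumed:

```ts
import { LlmService } from '../engine/llmservice';
import { FlowiseAi } from '../engine/flowise';
// Hypothetical paths for pieces this diff does not show:
import { OpenAi } from '../engine/openAi';
import { AiEngine } from '../engine/Engine';
import { getConfig } from '../commands/config';

export function getEngine(): AiEngine {
  const provider = getConfig()?.OCO_AI_PROVIDER;
  if (provider == 'llmservice') return new LlmService();
  if (provider == 'flowise') return new FlowiseAi();
  // ...ollama, anthropic, gemini, azure, test branches elided...
  return new OpenAi(); // OpenAI GPT by default
}
```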
Note: this is a condensed view of this merge commit; it does not show the full set of changes.