Commit 2d7e384

authored Sep 7, 2024
feat(config): add support for groq AI provider, including config validation and engine implementation (di-sukharev#381)
1 parent a91aa3b · commit 2d7e384

File tree: 4 files changed, +40 -8 lines changed

src/commands/config.ts (+19 -6)

@@ -76,6 +76,16 @@ export const MODEL_LIST = {
     'gemini-1.0-pro',
     'gemini-pro-vision',
     'text-embedding-004'
+  ],
+
+  groq: [
+    'llama3-70b-8192', // Meta Llama 3 70B (default one, no daily token limit and 14 400 reqs/day)
+    'llama3-8b-8192', // Meta Llama 3 8B
+    'llama-guard-3-8b', // Llama Guard 3 8B
+    'llama-3.1-8b-instant', // Llama 3.1 8B (Preview)
+    'llama-3.1-70b-versatile', // Llama 3.1 70B (Preview)
+    'gemma-7b-it', // Gemma 7B
+    'gemma2-9b-it' // Gemma 2 9B
   ]
 };

@@ -87,6 +97,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.anthropic[0];
     case 'gemini':
       return MODEL_LIST.gemini[0];
+    case 'groq':
+      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }

@@ -241,7 +253,7 @@ export const configValidators = {
 
     validateConfig(
       CONFIG_KEYS.OCO_AI_PROVIDER,
-      ['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise'].includes(
+      ['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise', 'groq'].includes(
         value
       ) || value.startsWith('ollama'),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`

@@ -288,7 +300,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   GEMINI = 'gemini',
   AZURE = 'azure',
   TEST = 'test',
-  FLOWISE = 'flowise'
+  FLOWISE = 'flowise',
+  GROQ = 'groq',
 }
 
 export type ConfigType = {

@@ -388,7 +401,7 @@ const getEnvConfig = (envPath: string) => {
     OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
     OCO_LANGUAGE: process.env.OCO_LANGUAGE,
     OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
-      process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
+      process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
     OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
     OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
     OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,

@@ -445,9 +458,9 @@ interface GetConfigOptions {
 }
 
 export const getConfig = ({
-  envPath = defaultEnvPath,
-  globalPath = defaultConfigPath
-}: GetConfigOptions = {}): ConfigType => {
+  envPath = defaultEnvPath,
+  globalPath = defaultConfigPath
+}: GetConfigOptions = {}): ConfigType => {
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);

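Taken together, the config changes register the Groq model list, let the provider validator accept 'groq', and make the default model fall back to the first groq entry. A minimal sketch of the resulting behaviour (not committed code; it only exercises the getDefaultModel helper and MODEL_LIST shown above):

  // Sketch: default-model resolution after this commit.
  getDefaultModel('groq');      // 'llama3-70b-8192' (MODEL_LIST.groq[0])
  getDefaultModel('gemini');    // MODEL_LIST.gemini[0], unchanged
  getDefaultModel(undefined);   // MODEL_LIST.openai[0] via the default branch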

src/engine/groq.ts (+10 -0)

@@ -0,0 +1,10 @@
+import { OpenAiConfig, OpenAiEngine } from './openAi';
+
+interface GroqConfig extends OpenAiConfig {}
+
+export class GroqEngine extends OpenAiEngine {
+  constructor(config: GroqConfig) {
+    config.baseURL = 'https://api.groq.com/openai/v1';
+    super(config);
+  }
+}
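
GroqEngine reuses the OpenAI-compatible client: it pins baseURL to Groq's endpoint and otherwise defers to OpenAiEngine. A hypothetical instantiation sketch (the GROQ_API_KEY variable name and the 'model' field are assumptions; only apiKey and baseURL appear in this diff):

  import { GroqEngine } from './groq';

  const engine = new GroqEngine({
    apiKey: process.env.GROQ_API_KEY!,  // assumed env var holding a Groq API key
    model: 'llama3-70b-8192'            // assumed config field; first entry of MODEL_LIST.groq
  });
  // The constructor overwrites baseURL with 'https://api.groq.com/openai/v1'
  // before handing the config to OpenAiEngine.

In the tool itself, the engine is not constructed by hand but from DEFAULT_CONFIG in src/utils/engine.ts, as shown further below.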

src/engine/openAi.ts (+7 -2)

@@ -4,15 +4,20 @@ import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitD
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
 
-interface OpenAiConfig extends AiEngineConfig {}
+export interface OpenAiConfig extends AiEngineConfig {}
 
 export class OpenAiEngine implements AiEngine {
   config: OpenAiConfig;
   client: OpenAI;
 
   constructor(config: OpenAiConfig) {
     this.config = config;
-    this.client = new OpenAI({ apiKey: config.apiKey });
+
+    if (!config.baseURL) {
+      this.client = new OpenAI({ apiKey: config.apiKey });
+    } else {
+      this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
+    }
   }
 
   public generateCommitMessage = async (
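
The branch only decides whether baseURL is forwarded to the OpenAI client. An equivalent, more compact construction (a sketch, not the committed code) would spread the optional field:

  // Sketch: pass baseURL only when the config provides one.
  this.client = new OpenAI({
    apiKey: config.apiKey,
    ...(config.baseURL ? { baseURL: config.baseURL } : {})
  });

Either way, engines such as GroqEngine that set config.baseURL get a client pointed at their own OpenAI-compatible endpoint, while plain OpenAI usage keeps the SDK default.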

src/utils/engine.ts (+4 -0)

@@ -7,6 +7,7 @@ import { GeminiEngine } from '../engine/gemini';
 import { OllamaEngine } from '../engine/ollama';
 import { OpenAiEngine } from '../engine/openAi';
 import { TestAi, TestMockType } from '../engine/testAi';
+import { GroqEngine } from '../engine/groq';
 
 export function getEngine(): AiEngine {
   const config = getConfig();

@@ -39,6 +40,9 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.FLOWISE:
       return new FlowiseEngine(DEFAULT_CONFIG);
 
+    case OCO_AI_PROVIDER_ENUM.GROQ:
+      return new GroqEngine(DEFAULT_CONFIG);
+
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }
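
With the dispatch in place, selecting the groq provider is enough for the rest of the tool to receive the new engine. A rough sketch of the runtime flow (the OCO_AI_PROVIDER field on the resolved config is assumed from CONFIG_KEYS.OCO_AI_PROVIDER above; DEFAULT_CONFIG is built in the surrounding file):

  // Sketch: provider dispatch after this commit.
  const config = getConfig();   // resolves OCO_AI_PROVIDER from env/global config
  // when config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.GROQ ...
  const engine = getEngine();   // ... getEngine() returns new GroqEngine(DEFAULT_CONFIG)
  // engine.generateCommitMessage(...) then goes through Groq's OpenAI-compatible API.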
