
Commit 2769121

di-sukharev, matscube, and ozeliurs authored Sep 7, 2024
* feat(config): add support for groq AI provider, including config validation and engine implementation (di-sukharev#381)
* fix migrations (di-sukharev#414)

---------

Co-authored-by: Takanori Matsumoto <matscube@gmail.com>
Co-authored-by: BILLY Maxime <ozeliurs@gmail.com>
1 parent 8ae927e commit 2769121

11 files changed: +199 −28 lines
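For orientation: the diffs below register "groq" as a new OCO_AI_PROVIDER value, add a Groq model list whose first entry (llama3-70b-8192) becomes the provider's default model, and route requests through the existing OpenAI client pointed at Groq's OpenAI-compatible endpoint. A hedged sketch of enabling the provider in the env-style ~/.opencommit global config; the API-key variable name is not part of this diff and is only assumed here:

OCO_AI_PROVIDER=groq
OCO_API_KEY=gsk_xxxxxxxx   # assumed variable name, not shown in this commit
# With no model configured, getDefaultModel("groq") falls back to MODEL_LIST.groq[0] ("llama3-70b-8192").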
 

out/cli.cjs

+65 −9
@@ -27331,7 +27331,7 @@ function G3(t2, e3) {
 // package.json
 var package_default = {
   name: "opencommit",
-  version: "3.2.1",
+  version: "3.2.2",
   description: "Auto-generate impressive commits in 1 second. Killing lame commits with AI \u{1F92F}\u{1F52B}",
   keywords: [
     "git",
@@ -29918,6 +29918,15 @@ var MODEL_LIST = {
     "gemini-1.0-pro",
     "gemini-pro-vision",
     "text-embedding-004"
+  ],
+  groq: [
+    "llama3-70b-8192",
+    "llama3-8b-8192",
+    "llama-guard-3-8b",
+    "llama-3.1-8b-instant",
+    "llama-3.1-70b-versatile",
+    "gemma-7b-it",
+    "gemma2-9b-it"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -29928,6 +29937,8 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.anthropic[0];
     case "gemini":
       return MODEL_LIST.gemini[0];
+    case "groq":
+      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -30051,9 +30062,15 @@ var configValidators = {
       value = "openai";
     validateConfig(
       "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-      ["openai", "anthropic", "gemini", "azure", "test", "flowise"].includes(
-        value
-      ) || value.startsWith("ollama"),
+      [
+        "openai",
+        "anthropic",
+        "gemini",
+        "azure",
+        "test",
+        "flowise",
+        "groq"
+      ].includes(value) || value.startsWith("ollama"),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
     );
     return value;
@@ -30093,6 +30110,7 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
   OCO_AI_PROVIDER_ENUM2["AZURE"] = "azure";
   OCO_AI_PROVIDER_ENUM2["TEST"] = "test";
   OCO_AI_PROVIDER_ENUM2["FLOWISE"] = "flowise";
+  OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
   return OCO_AI_PROVIDER_ENUM2;
 })(OCO_AI_PROVIDER_ENUM || {});
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -30109,7 +30127,6 @@ var DEFAULT_CONFIG = {
   OCO_AI_PROVIDER: "openai" /* OPENAI */,
   OCO_ONE_LINE_COMMIT: false,
   OCO_TEST_MOCK_TYPE: "commit-message",
-  OCO_FLOWISE_ENDPOINT: ":",
   OCO_WHY: false,
   OCO_GITPUSH: true
 };
@@ -30169,14 +30186,34 @@ var mergeConfigs = (main, fallback) => {
     return acc;
   }, {});
 };
+var cleanUndefinedValues = (config7) => {
+  return Object.fromEntries(
+    Object.entries(config7).map(([_7, v5]) => {
+      try {
+        if (typeof v5 === "string") {
+          if (v5 === "undefined")
+            return [_7, void 0];
+          if (v5 === "null")
+            return [_7, null];
+          const parsedValue = JSON.parse(v5);
+          return [_7, parsedValue];
+        }
+        return [_7, v5];
+      } catch (error) {
+        return [_7, v5];
+      }
+    })
+  );
+};
 var getConfig = ({
   envPath = defaultEnvPath,
   globalPath = defaultConfigPath
 } = {}) => {
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);
   const config7 = mergeConfigs(envConfig, globalConfig);
-  return config7;
+  const cleanConfig = cleanUndefinedValues(config7);
+  return cleanConfig;
 };
 var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
   const config7 = getConfig({
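The new cleanUndefinedValues pass normalizes values coming out of the env-style config files, where everything is stored as a string. A minimal standalone TypeScript sketch of its effect; the key names are illustrative and not all taken from this diff:

// Mirrors the bundled cleanUndefinedValues logic shown above.
const cleanUndefinedValues = (config: Record<string, unknown>) =>
  Object.fromEntries(
    Object.entries(config).map(([key, value]): [string, unknown] => {
      try {
        if (typeof value === 'string') {
          if (value === 'undefined') return [key, undefined];
          if (value === 'null') return [key, null];
          return [key, JSON.parse(value)]; // numbers, booleans, JSON literals
        }
        return [key, value];
      } catch {
        return [key, value]; // plain strings (model names, keys) pass through unchanged
      }
    })
  );

const cleaned = cleanUndefinedValues({
  OCO_AI_PROVIDER: 'groq',      // JSON.parse throws, kept as the string 'groq'
  OCO_ONE_LINE_COMMIT: 'false', // parsed to the boolean false
  OCO_GITPUSH: 'true',          // parsed to the boolean true
  OCO_API_KEY: 'undefined'      // the literal string "undefined" becomes a real undefined
});
// cleaned => { OCO_AI_PROVIDER: 'groq', OCO_ONE_LINE_COMMIT: false, OCO_GITPUSH: true, OCO_API_KEY: undefined }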
@@ -44471,7 +44508,19 @@ var OpenAiEngine = class {
       }
     };
     this.config = config7;
-    this.client = new OpenAI({ apiKey: config7.apiKey });
+    if (!config7.baseURL) {
+      this.client = new OpenAI({ apiKey: config7.apiKey });
+    } else {
+      this.client = new OpenAI({ apiKey: config7.apiKey, baseURL: config7.baseURL });
+    }
+  }
+};
+
+// src/engine/groq.ts
+var GroqEngine = class extends OpenAiEngine {
+  constructor(config7) {
+    config7.baseURL = "https://api.groq.com/openai/v1";
+    super(config7);
   }
 };

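The bundle only shows compiled output; a hedged sketch of what the referenced src/engine/groq.ts source plausibly looks like (the import path and exported names are assumptions, not read from the repo):

// src/engine/groq.ts (sketch): Groq exposes an OpenAI-compatible API, so the
// engine reuses the OpenAI client and only overrides the base URL.
import { OpenAiEngine } from './openAi'; // assumed module path

export class GroqEngine extends OpenAiEngine {
  constructor(config: ConstructorParameters<typeof OpenAiEngine>[0]) {
    config.baseURL = 'https://api.groq.com/openai/v1';
    super(config);
  }
}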
@@ -44499,6 +44548,8 @@ function getEngine() {
       return new AzureEngine(DEFAULT_CONFIG2);
     case "flowise" /* FLOWISE */:
       return new FlowiseEngine(DEFAULT_CONFIG2);
+    case "groq" /* GROQ */:
+      return new GroqEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }
@@ -45342,7 +45393,10 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
       }
     }
   } catch (error) {
-    commitGenerationSpinner.stop("\u{1F4DD} Commit message generated");
+    commitGenerationSpinner.stop(
+      `${source_default.red("\u2716")} Failed to generate the commit message`
+    );
+    console.log(error);
     const err = error;
     ce(`${source_default.red("\u2716")} ${err?.message || err}`);
     process.exit(1);
@@ -45677,11 +45731,12 @@ function set_missing_default_values_default() {
     const entriesToSet = [];
     for (const entry of Object.entries(DEFAULT_CONFIG)) {
       const [key, _value] = entry;
-      if (config7[key] === "undefined")
+      if (config7[key] === "undefined" || config7[key] === void 0)
        entriesToSet.push(entry);
     }
     if (entriesToSet.length > 0)
       setConfig(entriesToSet);
+    console.log(entriesToSet);
   };
   setDefaultConfigValues(getGlobalConfig());
 }
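The widened condition matters because getConfig now runs cleanUndefinedValues: a key that older config files stored as the literal string "undefined" can reach this migration either as that string or as a real undefined, and both should be backfilled from DEFAULT_CONFIG. Restated as a small predicate (illustrative only, not part of the diff):

// A config entry counts as missing when it is genuinely undefined or still
// holds the legacy string placeholder written by older versions.
const isMissing = (value: unknown): boolean =>
  value === undefined || value === 'undefined';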
@@ -45738,6 +45793,7 @@ var runMigrations = async () => {
       ce(
         `${source_default.red("Failed to apply migration")} ${migration.name}: ${error}`
       );
+      process.exit(1);
     }
     isMigrated = true;
   }

out/github-action.cjs

+56 −6
@@ -48730,6 +48730,15 @@ var MODEL_LIST = {
     "gemini-1.0-pro",
     "gemini-pro-vision",
     "text-embedding-004"
+  ],
+  groq: [
+    "llama3-70b-8192",
+    "llama3-8b-8192",
+    "llama-guard-3-8b",
+    "llama-3.1-8b-instant",
+    "llama-3.1-70b-versatile",
+    "gemma-7b-it",
+    "gemma2-9b-it"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -48740,6 +48749,8 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.anthropic[0];
     case "gemini":
       return MODEL_LIST.gemini[0];
+    case "groq":
+      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -48863,9 +48874,15 @@ var configValidators = {
       value = "openai";
     validateConfig(
       "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-      ["openai", "anthropic", "gemini", "azure", "test", "flowise"].includes(
-        value
-      ) || value.startsWith("ollama"),
+      [
+        "openai",
+        "anthropic",
+        "gemini",
+        "azure",
+        "test",
+        "flowise",
+        "groq"
+      ].includes(value) || value.startsWith("ollama"),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
     );
     return value;
@@ -48911,7 +48928,6 @@ var DEFAULT_CONFIG = {
   OCO_AI_PROVIDER: "openai" /* OPENAI */,
   OCO_ONE_LINE_COMMIT: false,
   OCO_TEST_MOCK_TYPE: "commit-message",
-  OCO_FLOWISE_ENDPOINT: ":",
   OCO_WHY: false,
   OCO_GITPUSH: true
 };
@@ -48971,14 +48987,34 @@ var mergeConfigs = (main, fallback) => {
     return acc;
   }, {});
 };
+var cleanUndefinedValues = (config6) => {
+  return Object.fromEntries(
+    Object.entries(config6).map(([_3, v2]) => {
+      try {
+        if (typeof v2 === "string") {
+          if (v2 === "undefined")
+            return [_3, void 0];
+          if (v2 === "null")
+            return [_3, null];
+          const parsedValue = JSON.parse(v2);
+          return [_3, parsedValue];
+        }
+        return [_3, v2];
+      } catch (error) {
+        return [_3, v2];
+      }
+    })
+  );
+};
 var getConfig = ({
   envPath = defaultEnvPath,
   globalPath = defaultConfigPath
 } = {}) => {
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);
   const config6 = mergeConfigs(envConfig, globalConfig);
-  return config6;
+  const cleanConfig = cleanUndefinedValues(config6);
+  return cleanConfig;
 };
 var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
   const config6 = getConfig({
@@ -63273,7 +63309,19 @@ var OpenAiEngine = class {
       }
     };
     this.config = config6;
-    this.client = new OpenAI({ apiKey: config6.apiKey });
+    if (!config6.baseURL) {
+      this.client = new OpenAI({ apiKey: config6.apiKey });
+    } else {
+      this.client = new OpenAI({ apiKey: config6.apiKey, baseURL: config6.baseURL });
+    }
+  }
+};
+
+// src/engine/groq.ts
+var GroqEngine = class extends OpenAiEngine {
+  constructor(config6) {
+    config6.baseURL = "https://api.groq.com/openai/v1";
+    super(config6);
   }
 };

@@ -63301,6 +63349,8 @@ function getEngine() {
       return new AzureEngine(DEFAULT_CONFIG2);
     case "flowise" /* FLOWISE */:
       return new FlowiseEngine(DEFAULT_CONFIG2);
+    case "groq" /* GROQ */:
+      return new GroqEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }

package-lock.json

+2 −2
Some generated files are not rendered by default.

package.json

+1 −1
@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "3.2.1",
+  "version": "3.2.2",
   "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",

src/commands/commit.ts

+5 −1
@@ -183,7 +183,11 @@ ${chalk.grey('——————————————————')}`
       }
     }
   } catch (error) {
-    commitGenerationSpinner.stop('📝 Commit message generated');
+    commitGenerationSpinner.stop(
+      `${chalk.red('✖')} Failed to generate the commit message`
+    );
+
+    console.log(error);
 
     const err = error as Error;
     outro(`${chalk.red('✖')} ${err?.message || err}`);
