Skip to content

Commit 8564135

Browse files
authored
feat(openclaw): add llmProvider/llmModel plugin config options (#274)
Add llmProvider, llmModel, and llmApiKeyEnv to the plugin config schema. These allow users to choose which LLM Hindsight uses directly from the openclaw.json config, without needing HINDSIGHT_API_LLM_* env vars.

Priority order (highest to lowest):
1. HINDSIGHT_API_LLM_PROVIDER env var (unchanged)
2. Plugin config llmProvider/llmModel (NEW)
3. Auto-detect from provider env vars (unchanged)

Backward compatible: no config = same behavior as before.
1 parent 44d9125 commit 8564135

4 files changed

Lines changed: 76 additions & 12 deletions

File tree

hindsight-integrations/openclaw/openclaw.plugin.json

Lines changed: 25 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -24,6 +24,19 @@
2424
"type": "string",
2525
"description": "hindsight-embed version to use (e.g. 'latest', '0.4.2', or empty for latest)",
2626
"default": "latest"
27+
},
28+
"llmProvider": {
29+
"type": "string",
30+
"description": "LLM provider for Hindsight memory (e.g. 'openai', 'anthropic', 'gemini', 'groq', 'ollama'). Takes priority over auto-detection but not over HINDSIGHT_API_LLM_PROVIDER env var.",
31+
"enum": ["openai", "anthropic", "gemini", "groq", "ollama"]
32+
},
33+
"llmModel": {
34+
"type": "string",
35+
"description": "LLM model to use (e.g. 'gpt-4o-mini', 'claude-3-5-haiku-20241022'). Used with llmProvider."
36+
},
37+
"llmApiKeyEnv": {
38+
"type": "string",
39+
"description": "Name of the env var holding the API key (e.g. 'MY_CUSTOM_KEY'). If not set, uses the standard env var for the chosen provider."
2740
}
2841
},
2942
"additionalProperties": false
@@ -44,6 +57,18 @@
4457
"embedVersion": {
4558
"label": "Hindsight Embed Version",
4659
"placeholder": "latest (or pin to specific version like 0.4.2)"
60+
},
61+
"llmProvider": {
62+
"label": "LLM Provider",
63+
"placeholder": "e.g. openai, anthropic, gemini, groq"
64+
},
65+
"llmModel": {
66+
"label": "LLM Model",
67+
"placeholder": "e.g. gpt-4o-mini, claude-3-5-haiku-20241022"
68+
},
69+
"llmApiKeyEnv": {
70+
"label": "API Key Env Var",
71+
"placeholder": "e.g. MY_CUSTOM_API_KEY (optional)"
4772
}
4873
}
4974
}

hindsight-integrations/openclaw/package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

hindsight-integrations/openclaw/src/index.ts

Lines changed: 46 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -37,7 +37,7 @@ const PROVIDER_DETECTION = [
3737
{ name: 'ollama', keyEnv: '', defaultModel: 'llama3.2' },
3838
];
3939

40-
function detectLLMConfig(): {
40+
function detectLLMConfig(pluginConfig?: PluginConfig): {
4141
provider: string;
4242
apiKey: string;
4343
model?: string;
@@ -50,7 +50,7 @@ function detectLLMConfig(): {
5050
const overrideKey = process.env.HINDSIGHT_API_LLM_API_KEY;
5151
const overrideBaseUrl = process.env.HINDSIGHT_API_LLM_BASE_URL;
5252

53-
// If provider is explicitly set, use that (with overrides)
53+
// Priority 1: If provider is explicitly set via env var, use that
5454
if (overrideProvider) {
5555
if (!overrideKey && overrideProvider !== 'ollama') {
5656
throw new Error(
@@ -69,7 +69,37 @@ function detectLLMConfig(): {
6969
};
7070
}
7171

72-
// Auto-detect from standard provider env vars
72+
// Priority 2: Plugin config llmProvider/llmModel
73+
if (pluginConfig?.llmProvider) {
74+
const providerInfo = PROVIDER_DETECTION.find(p => p.name === pluginConfig.llmProvider);
75+
76+
// Resolve API key: llmApiKeyEnv > provider's standard keyEnv
77+
let apiKey = '';
78+
if (pluginConfig.llmApiKeyEnv) {
79+
apiKey = process.env[pluginConfig.llmApiKeyEnv] || '';
80+
} else if (providerInfo?.keyEnv) {
81+
apiKey = process.env[providerInfo.keyEnv] || '';
82+
}
83+
84+
if (!apiKey && pluginConfig.llmProvider !== 'ollama') {
85+
const keySource = pluginConfig.llmApiKeyEnv || providerInfo?.keyEnv || 'unknown';
86+
throw new Error(
87+
`Plugin config llmProvider is set to "${pluginConfig.llmProvider}" but no API key found.\n` +
88+
`Expected env var: ${keySource}\n` +
89+
`Set the env var or use llmApiKeyEnv in plugin config to specify a custom env var name.`
90+
);
91+
}
92+
93+
return {
94+
provider: pluginConfig.llmProvider,
95+
apiKey,
96+
model: pluginConfig.llmModel || overrideModel || providerInfo?.defaultModel,
97+
baseUrl: overrideBaseUrl,
98+
source: 'plugin config',
99+
};
100+
}
101+
102+
// Priority 3: Auto-detect from standard provider env vars
73103
for (const providerInfo of PROVIDER_DETECTION) {
74104
const apiKey = providerInfo.keyEnv ? process.env[providerInfo.keyEnv] : '';
75105

@@ -97,7 +127,9 @@ function detectLLMConfig(): {
97127
` export ANTHROPIC_API_KEY=your-key # Uses claude-3-5-haiku\n` +
98128
` export GEMINI_API_KEY=your-key # Uses gemini-2.0-flash-exp\n` +
99129
` export GROQ_API_KEY=your-key # Uses llama-3.3-70b-versatile\n\n` +
100-
`Option 2: Override with Hindsight-specific config:\n` +
130+
`Option 2: Set llmProvider in openclaw.json plugin config:\n` +
131+
` "llmProvider": "openai", "llmModel": "gpt-4o-mini"\n\n` +
132+
`Option 3: Override with Hindsight-specific env vars:\n` +
101133
` export HINDSIGHT_API_LLM_PROVIDER=openai\n` +
102134
` export HINDSIGHT_API_LLM_MODEL=gpt-4o-mini\n` +
103135
` export HINDSIGHT_API_LLM_API_KEY=sk-your-key\n` +
@@ -115,16 +147,23 @@ function getPluginConfig(api: MoltbotPluginAPI): PluginConfig {
115147
embedPort: config.embedPort || 0,
116148
daemonIdleTimeout: config.daemonIdleTimeout !== undefined ? config.daemonIdleTimeout : 0,
117149
embedVersion: config.embedVersion || 'latest',
150+
llmProvider: config.llmProvider,
151+
llmModel: config.llmModel,
152+
llmApiKeyEnv: config.llmApiKeyEnv,
118153
};
119154
}
120155

121156
export default function (api: MoltbotPluginAPI) {
122157
try {
123158
console.log('[Hindsight] Plugin loading...');
124159

125-
// Detect LLM configuration from environment
160+
// Get plugin config first (needed for LLM detection)
161+
console.log('[Hindsight] Getting plugin config...');
162+
const pluginConfig = getPluginConfig(api);
163+
164+
// Detect LLM configuration (env vars > plugin config > auto-detect)
126165
console.log('[Hindsight] Detecting LLM config...');
127-
const llmConfig = detectLLMConfig();
166+
const llmConfig = detectLLMConfig(pluginConfig);
128167

129168
const baseUrlInfo = llmConfig.baseUrl ? `, base URL: ${llmConfig.baseUrl}` : '';
130169
const modelInfo = llmConfig.model || 'default';
@@ -134,9 +173,6 @@ export default function (api: MoltbotPluginAPI) {
134173
} else {
135174
console.log(`[Hindsight] ✓ Using provider: ${llmConfig.provider}, model: ${modelInfo} (${llmConfig.source}${baseUrlInfo})`);
136175
}
137-
138-
console.log('[Hindsight] Getting plugin config...');
139-
const pluginConfig = getPluginConfig(api);
140176
if (pluginConfig.bankMission) {
141177
console.log(`[Hindsight] Custom bank mission configured: "${pluginConfig.bankMission.substring(0, 50)}..."`);
142178
}
@@ -225,8 +261,8 @@ export default function (api: MoltbotPluginAPI) {
225261
// Reinitialize if needed (fresh start or recovery from dead daemon)
226262
if (!isInitialized) {
227263
console.log('[Hindsight] Reinitializing daemon...');
228-
const llmConfig = detectLLMConfig();
229264
const pluginConfig = getPluginConfig(api);
265+
const llmConfig = detectLLMConfig(pluginConfig);
230266
const port = pluginConfig.embedPort || Math.floor(Math.random() * 10000) + 10000;
231267

232268
embedManager = new HindsightEmbedManager(

hindsight-integrations/openclaw/src/types.ts

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -32,6 +32,9 @@ export interface PluginConfig {
3232
embedPort?: number;
3333
daemonIdleTimeout?: number; // Seconds before daemon shuts down (0 = never)
3434
embedVersion?: string; // hindsight-embed version (default: "latest")
35+
llmProvider?: string; // LLM provider override (e.g. 'openai', 'anthropic', 'gemini', 'groq', 'ollama')
36+
llmModel?: string; // LLM model override (e.g. 'gpt-4o-mini', 'claude-3-5-haiku-20241022')
37+
llmApiKeyEnv?: string; // Env var name holding the API key (e.g. 'MY_CUSTOM_KEY')
3538
}
3639

3740
export interface ServiceConfig {

0 commit comments

Comments (0)