diff --git a/packages/agent/src/core/toolAgent/config.ts b/packages/agent/src/core/toolAgent/config.ts
index 5bdc589..902af2f 100644
--- a/packages/agent/src/core/toolAgent/config.ts
+++ b/packages/agent/src/core/toolAgent/config.ts
@@ -20,25 +20,25 @@ export type ModelProvider =
  */
 export function getModel(
   provider: ModelProvider,
-  modelName: string,
+  model: string,
   options?: { ollamaBaseUrl?: string },
 ): LLMProvider {
   switch (provider) {
     case 'anthropic':
-      return createProvider('anthropic', modelName);
+      return createProvider('anthropic', model);
     case 'openai':
-      return createProvider('openai', modelName);
+      return createProvider('openai', model);
     case 'ollama':
       if (options?.ollamaBaseUrl) {
-        return createProvider('ollama', modelName, {
+        return createProvider('ollama', model, {
           baseUrl: options.ollamaBaseUrl,
         });
       }
-      return createProvider('ollama', modelName);
+      return createProvider('ollama', model);
     case 'xai':
-      return createProvider('xai', modelName);
+      return createProvider('xai', model);
     case 'mistral':
-      return createProvider('mistral', modelName);
+      return createProvider('mistral', model);
     default:
       throw new Error(`Unknown model provider: ${provider}`);
   }
diff --git a/packages/cli/README.md b/packages/cli/README.md
index 143c571..62ff897 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -80,9 +80,6 @@ mycoder config set githubMode true
 # Reset a configuration value to its default
 mycoder config clear customPrompt
 
-# Configure model provider and model name
-mycoder config set modelProvider openai
-mycoder config set modelName gpt-4o-2024-05-13
 ```
 
 ### Model Selection
@@ -93,33 +90,9 @@ MyCoder supports Anthropic, OpenAI, xAI/Grok, Mistral AI, and Ollama models. You
 
 ```bash
 # Use Anthropic models [These work the best at this time]
-mycoder config set modelProvider anthropic
-mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
+mycoder config set provider anthropic
+mycoder config set model claude-3-7-sonnet-20250219 # or any other Anthropic model
 
-# Use OpenAI models
-mycoder config set modelProvider openai
-mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
-
-# Use xAI/Grok models
-mycoder config set modelProvider xai
-mycoder config set modelName grok-1 # or any other xAI model
-
-# Use Mistral AI models
-mycoder config set modelProvider mistral
-mycoder config set modelName mistral-large-latest # or any other Mistral model
-
-# Use Ollama models (local)
-mycoder config set modelProvider ollama
-mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
-
-# Configure custom Ollama server URL (default is http://localhost:11434/api)
-mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
-```
-
-You can also specify the model provider and name directly when running a command:
-
-```bash
-mycoder --modelProvider openai --modelName gpt-4o-2024-05-13 "Your prompt here"
 ```
 
 ### Available Configuration Options
diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts
index 6b4d1a0..2e05349 100644
--- a/packages/cli/src/commands/$default.ts
+++ b/packages/cli/src/commands/$default.ts
@@ -96,16 +96,8 @@ export const command: CommandModule = {
     tokenTracker.tokenCache =
       argv.tokenCache !== undefined ? argv.tokenCache : userConfig.tokenCache;
 
-    const userModelProvider =
-      argv.provider ||
-      argv.modelProvider ||
-      userConfig.provider ||
-      userConfig.modelProvider;
-    const userModelName =
-      argv.model ||
-      argv.modelName ||
-      userConfig.model ||
-      userConfig.modelName;
+    const userModelProvider = argv.provider || userConfig.provider;
+    const userModelName = argv.model || userConfig.model;
     const userMaxTokens = argv.maxTokens || userConfig.maxTokens;
     const userTemperature = argv.temperature || userConfig.temperature;
 
diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts
index e865bc9..cad82d6 100644
--- a/packages/cli/src/options.ts
+++ b/packages/cli/src/options.ts
@@ -9,9 +9,6 @@ export type SharedOptions = {
   readonly sentryDsn?: string;
   readonly provider?: string;
   readonly model?: string;
-  // Legacy options - will be removed in a future version
-  readonly modelProvider?: string;
-  readonly modelName?: string;
   readonly maxTokens?: number;
   readonly temperature?: number;
   readonly profile?: boolean;
@@ -41,18 +38,6 @@ export const sharedOptions = {
     type: 'string',
     description: 'AI model name to use',
   } as const,
-  // Legacy options - will be removed in a future version
-  modelProvider: {
-    type: 'string',
-    description: 'AI model provider to use (deprecated, use provider instead)',
-    choices: ['anthropic', 'openai', 'ollama', 'xai', 'mistral'],
-    hidden: true,
-  } as const,
-  modelName: {
-    type: 'string',
-    description: 'AI model name to use (deprecated, use model instead)',
-    hidden: true,
-  } as const,
   maxTokens: {
     type: 'number',
     description: 'Maximum number of tokens to generate',
diff --git a/packages/cli/src/settings/config.ts b/packages/cli/src/settings/config.ts
index 632a716..ea0b3e3 100644
--- a/packages/cli/src/settings/config.ts
+++ b/packages/cli/src/settings/config.ts
@@ -14,9 +14,6 @@ const defaultConfig = {
   pageFilter: 'none' as 'simple' | 'none' | 'readability',
   provider: 'anthropic',
   model: 'claude-3-7-sonnet-20250219',
-  // Legacy names - will be removed in a future version
-  modelProvider: 'anthropic',
-  modelName: 'claude-3-7-sonnet-20250219',
   maxTokens: 4096,
   temperature: 0.7,
   ollamaBaseUrl: 'http://localhost:11434/api',
diff --git a/packages/cli/tests/settings/config-defaults.test.ts b/packages/cli/tests/settings/config-defaults.test.ts
index 96f8b53..a724aed 100644
--- a/packages/cli/tests/settings/config-defaults.test.ts
+++ b/packages/cli/tests/settings/config-defaults.test.ts
@@ -60,8 +60,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -99,8 +99,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none', // Default is none
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -138,8 +138,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -189,8 +189,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none', // Default is none
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
diff --git a/packages/cli/tests/settings/config.test.ts b/packages/cli/tests/settings/config.test.ts
index b31358b..b1bc3d0 100644
--- a/packages/cli/tests/settings/config.test.ts
+++ b/packages/cli/tests/settings/config.test.ts
@@ -43,8 +43,8 @@ describe('Config', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       maxTokens: 4096,
       temperature: 0.7,
       ollamaBaseUrl: 'http://localhost:11434/api',
@@ -87,8 +87,8 @@ describe('Config', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       maxTokens: 4096,
       temperature: 0.7,
       ollamaBaseUrl: 'http://localhost:11434/api',
diff --git a/packages/cli/tests/settings/configDefaults.test.ts b/packages/cli/tests/settings/configDefaults.test.ts
index 96f8b53..a724aed 100644
--- a/packages/cli/tests/settings/configDefaults.test.ts
+++ b/packages/cli/tests/settings/configDefaults.test.ts
@@ -60,8 +60,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -99,8 +99,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none', // Default is none
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -138,8 +138,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none',
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
 
@@ -189,8 +189,8 @@ describe('Config Defaults for CLI Options', () => {
       pageFilter: 'none', // Default is none
       provider: 'anthropic',
       model: 'claude-3-7-sonnet-20250219',
-      modelProvider: 'anthropic',
-      modelName: 'claude-3-7-sonnet-20250219',
+      provider: 'anthropic',
+      model: 'claude-3-7-sonnet-20250219',
       ollamaBaseUrl: 'http://localhost:11434/api',
     });
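
For context on how the pieces above fit together, here is a minimal sketch (not part of the patch) of the consolidated `provider` / `model` flow. Only the `getModel` signature, the `argv.provider || userConfig.provider` fallback, and the default values come from the diff; the type and helper names below are illustrative assumptions.

```ts
// Minimal sketch, not mycoder source. `UserConfig`, `CliArgs`, and
// `resolveModelSettings` are illustrative names for this example only.

type ModelProvider = 'anthropic' | 'openai' | 'ollama' | 'xai' | 'mistral';

interface UserConfig {
  provider: ModelProvider; // default: 'anthropic'
  model: string; // default: 'claude-3-7-sonnet-20250219'
  ollamaBaseUrl?: string; // default: 'http://localhost:11434/api'
}

interface CliArgs {
  provider?: string;
  model?: string;
}

// Mirrors the simplified fallback in $default.ts: a CLI flag wins, otherwise
// the stored config value is used; the legacy modelProvider/modelName keys
// are no longer consulted.
function resolveModelSettings(argv: CliArgs, userConfig: UserConfig) {
  const provider = (argv.provider || userConfig.provider) as ModelProvider;
  const model = argv.model || userConfig.model;
  return { provider, model };
}

// The resolved pair is what getModel(provider, model, options) in
// packages/agent/src/core/toolAgent/config.ts expects, e.g.:
//   const { provider, model } = resolveModelSettings(argv, userConfig);
//   const llm = getModel(provider, model, {
//     ollamaBaseUrl: userConfig.ollamaBaseUrl,
//   });
```

Because the `--modelProvider` / `--modelName` aliases and the `userConfig.modelProvider` / `userConfig.modelName` fallbacks are removed, settings stored only under the legacy keys fall back to the defaults shown above.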