From b23dd28880185db6e645694bd570115f6bd29fd2 Mon Sep 17 00:00:00 2001
From: Ben Houston
Date: Tue, 4 Mar 2025 15:24:01 -0500
Subject: [PATCH 1/2] Add Ollama support via the Vercel AI SDK (#85)

---
 packages/agent/src/core/toolAgent/config.ts | 17 +++++-
 .../agent/src/tools/interaction/subAgent.ts | 53 +++++++------------
 packages/cli/README.md                      | 17 +++++-
 packages/cli/src/commands/$default.ts       |  4 +-
 packages/cli/src/settings/config.ts         |  1 +
 packages/cli/tests/settings/config.test.ts  |  2 +
 6 files changed, 56 insertions(+), 38 deletions(-)

diff --git a/packages/agent/src/core/toolAgent/config.ts b/packages/agent/src/core/toolAgent/config.ts
index e66737c..4ec3c2b 100644
--- a/packages/agent/src/core/toolAgent/config.ts
+++ b/packages/agent/src/core/toolAgent/config.ts
@@ -2,11 +2,12 @@ import { execSync } from 'child_process';
 
 import { anthropic } from '@ai-sdk/anthropic';
 import { openai } from '@ai-sdk/openai';
+import { createOllama, ollama } from 'ollama-ai-provider';
 
 /**
  * Available model providers
  */
-export type ModelProvider = 'anthropic' | 'openai';
+export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
 
 /**
  * Available models by provider
@@ -14,17 +15,29 @@ export type ModelProvider = 'anthropic' | 'openai';
 export const AVAILABLE_MODELS = {
   anthropic: ['claude-3-7-sonnet-20250219', 'claude-3-opus-20240229'],
   openai: ['gpt-4o-2024-05-13', 'o3-mini-2024-07-18'],
+  ollama: ['llama3-groq-tool-use'],
 };
 
 /**
  * Get the model instance based on provider and model name
  */
-export function getModel(provider: ModelProvider, modelName: string) {
+export function getModel(
+  provider: ModelProvider,
+  modelName: string,
+  options?: { ollamaBaseUrl?: string },
+) {
   switch (provider) {
     case 'anthropic':
       return anthropic(modelName);
     case 'openai':
       return openai(modelName);
+    case 'ollama':
+      if (options?.ollamaBaseUrl) {
+        return createOllama({
+          baseURL: options.ollamaBaseUrl,
+        })(modelName);
+      }
+      return ollama(modelName);
     default:
       throw new Error(`Unknown model provider: ${provider}`);
   }
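For reference, a minimal sketch of how the new `options` argument behaves at a call site (illustrative only; `getModel` is the same export the CLI imports from `mycoder-agent` below, and the server URL is a placeholder):

```ts
import { getModel } from 'mycoder-agent';

// Existing providers ignore the options argument entirely.
const claude = getModel('anthropic', 'claude-3-7-sonnet-20250219');

// Without options, the ollama branch falls back to the provider default
// (http://localhost:11434/api).
const local = getModel('ollama', 'llama3-groq-tool-use');

// With ollamaBaseUrl set, createOllama builds a provider bound to that server.
const remote = getModel('ollama', 'llama3-groq-tool-use', {
  ollamaBaseUrl: 'http://your-ollama-server:11434/api',
});
```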
diff --git a/packages/agent/src/tools/interaction/subAgent.ts b/packages/agent/src/tools/interaction/subAgent.ts
index 25e80b3..d175bdf 100644
--- a/packages/agent/src/tools/interaction/subAgent.ts
+++ b/packages/agent/src/tools/interaction/subAgent.ts
@@ -1,8 +1,8 @@
 import { z } from 'zod';
 import { zodToJsonSchema } from 'zod-to-json-schema';
 
-import { getDefaultSystemPrompt } from '../../core/toolAgent/index.js';
 import { getModel } from '../../core/toolAgent/config.js';
+import { getDefaultSystemPrompt } from '../../core/toolAgent/index.js';
 import { toolAgent } from '../../core/toolAgent.js';
 import { Tool, ToolContext } from '../../core/types.js';
 import { getTools } from '../getTools.js';
@@ -18,23 +18,14 @@ const parameterSchema = z.object({
   projectContext: z
     .string()
     .describe('Context about the problem or environment'),
-  fileContext: z
-    .object({
-      workingDirectory: z
-        .string()
-        .optional()
-        .describe('The directory where the sub-agent should operate'),
-      relevantFiles: z
-        .string()
-        .optional()
-        .describe(
-          'A list of files, which may include ** or * wildcard characters',
-        ),
-    })
-    .describe(
-      'When working with files and directories, it is best to be very specific to avoid sub-agents making incorrect assumptions',
-    )
-    .optional(),
+  workingDirectory: z
+    .string()
+    .optional()
+    .describe('The directory where the sub-agent should operate'),
+  relevantFilesDirectories: z
+    .string()
+    .optional()
+    .describe('A list of files, which may include ** or * wildcard characters'),
 });
 
 const returnSchema = z.object({
@@ -77,25 +68,22 @@ export const subAgentTool: Tool = {
   returnsJsonSchema: zodToJsonSchema(returnSchema),
   execute: async (params, context) => {
     // Validate parameters
-    const { description, goal, projectContext, fileContext } =
-      parameterSchema.parse(params);
+    const {
+      description,
+      goal,
+      projectContext,
+      workingDirectory,
+      relevantFilesDirectories,
+    } = parameterSchema.parse(params);
 
     // Construct a well-structured prompt
     const prompt = [
       `Description: ${description}`,
       `Goal: ${goal}`,
       `Project Context: ${projectContext}`,
-      fileContext
-        ? `\nContext:\n${[
-            fileContext.workingDirectory
-              ? `- Working Directory: ${fileContext.workingDirectory}`
-              : '',
-            fileContext.relevantFiles
-              ? `- Relevant Files:\n  ${fileContext.relevantFiles}`
-              : '',
-          ]
-            .filter(Boolean)
-            .join('\n')}`
+      workingDirectory ? `Working Directory: ${workingDirectory}` : '',
+      relevantFilesDirectories
+        ? `Relevant Files:\n  ${relevantFilesDirectories}`
         : '',
     ]
       .filter(Boolean)
@@ -110,8 +98,7 @@ export const subAgentTool: Tool = {
 
     const result = await toolAgent(prompt, tools, config, {
      ...context,
-      workingDirectory:
-        fileContext?.workingDirectory ?? context.workingDirectory,
+      workingDirectory: workingDirectory ?? context.workingDirectory,
    });
     return { response: result.result };
   },
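The schema change flattens the old nested `fileContext` object into two optional top-level fields. A sketch of the call-site difference (parameter values are made up; `context` stands for whatever `ToolContext` the caller already holds):

```ts
// Before: nested fileContext object
await subAgentTool.execute(
  {
    description: 'Update the README',
    goal: 'Document the Ollama provider',
    projectContext: 'pnpm monorepo',
    fileContext: {
      workingDirectory: 'packages/cli',
      relevantFiles: 'README.md',
    },
  },
  context,
);

// After: flattened, individually optional fields
await subAgentTool.execute(
  {
    description: 'Update the README',
    goal: 'Document the Ollama provider',
    projectContext: 'pnpm monorepo',
    workingDirectory: 'packages/cli',
    relevantFilesDirectories: 'README.md',
  },
  context,
);
```

Flattening keeps the prompt assembly a single `filter(Boolean)` pipeline and gives sub-agents one less level of nesting to get wrong.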
diff --git a/packages/cli/README.md b/packages/cli/README.md
index 3a2a40b..a1894fc 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -4,7 +4,7 @@ Command-line interface for AI-powered coding tasks.
 
 ## Features
 
-- 🤖 **AI-Powered**: Leverages Anthropic's Claude and OpenAI models for intelligent coding assistance
+- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, and Ollama for intelligent coding assistance
 - 🛠️ **Extensible Tool System**: Modular architecture with various tool categories
 - 🔄 **Parallel Execution**: Ability to spawn sub-agents for concurrent task processing
 - 📝 **Self-Modification**: Can modify code, it was built and tested by writing itself
@@ -82,7 +82,7 @@ mycoder config set modelName gpt-4o-2024-05-13
 
 ### Model Selection
 
-MyCoder supports both Anthropic and OpenAI models. You can configure which model to use with the following commands:
+MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model to use with the following commands:
 
 ```bash
 # Use OpenAI's GPT-4o model
 mycoder config set modelProvider openai
 mycoder config set modelName gpt-4o-2024-05-13
@@ -100,6 +100,13 @@ mycoder config set modelName claude-3-7-sonnet-20250219
 # Use Anthropic's Claude 3 Opus model
 mycoder config set modelProvider anthropic
 mycoder config set modelName claude-3-opus-20240229
+
+# Use Ollama's llama3-groq-tool-use model (local)
+mycoder config set modelProvider ollama
+mycoder config set modelName llama3-groq-tool-use
+
+# Configure custom Ollama server URL (default is http://localhost:11434/api)
+mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
 ```
 
 You can also specify the model provider and name directly when running a command:
 
 ```bash
 mycoder --modelProvider openai --modelName gpt-4o-2024-05-13 "Your prompt here"
@@ -114,6 +121,7 @@
 - `headless`: Run browser in headless mode with no UI showing (default: `true`)
 - `userSession`: Use user's existing browser session instead of sandboxed session (default: `false`)
 - `pageFilter`: Method to process webpage content: 'simple', 'none', or 'readability' (default: `none`)
+- `ollamaBaseUrl`: Base URL for Ollama API (default: `http://localhost:11434/api`)
 
 Example:
 
 ```bash
 # Set browser to show UI
 mycoder config set headless false
 
 # Use user's existing browser session
 mycoder config set userSession true
 
 # Use readability for webpage processing
 mycoder config set pageFilter readability
+
+# Set custom Ollama server URL
+mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
 ```
 
 ## Environment Variables
 
 - `ANTHROPIC_API_KEY`: Your Anthropic API key (required when using Anthropic models)
 - `OPENAI_API_KEY`: Your OpenAI API key (required when using OpenAI models)
 
+Note: Ollama models do not require an API key as they run locally or on a specified server.
+
 ## Development
 
 ```bash
diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts
index cdfaab2..ade0941 100644
--- a/packages/cli/src/commands/$default.ts
+++ b/packages/cli/src/commands/$default.ts
@@ -109,6 +109,7 @@ export const command: CommandModule = {
       );
       throw new Error('OpenAI API key not found');
     }
+    // No API key check needed for Ollama as it uses a local server
 
     // Validate model name
     if (!AVAILABLE_MODELS[userModelProvider].includes(userModelName)) {
@@ -166,8 +167,9 @@ export const command: CommandModule = {
       const agentConfig = {
         ...DEFAULT_CONFIG,
         model: getModel(
-          userModelProvider as 'anthropic' | 'openai',
+          userModelProvider as 'anthropic' | 'openai' | 'ollama',
           userModelName,
+          { ollamaBaseUrl: config.ollamaBaseUrl },
         ),
       };
diff --git a/packages/cli/src/settings/config.ts b/packages/cli/src/settings/config.ts
index de00d2f..f7fe29f 100644
--- a/packages/cli/src/settings/config.ts
+++ b/packages/cli/src/settings/config.ts
@@ -14,6 +14,7 @@ const defaultConfig = {
   pageFilter: 'none' as 'simple' | 'none' | 'readability',
   modelProvider: 'anthropic',
   modelName: 'claude-3-7-sonnet-20250219',
+  ollamaBaseUrl: 'http://localhost:11434/api',
 };
 
 export type Config = typeof defaultConfig;
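Read together, the CLI changes wire a persisted setting through to the provider factory: `ollamaBaseUrl` gets a default in `defaultConfig`, is always passed to `getModel`, and is simply ignored by the non-Ollama branches. A condensed sketch of that flow (config values are illustrative):

```ts
import { getModel, DEFAULT_CONFIG } from 'mycoder-agent';

// What `mycoder config set` would have persisted:
const config = {
  modelProvider: 'ollama' as const,
  modelName: 'llama3-groq-tool-use',
  ollamaBaseUrl: 'http://localhost:11434/api',
};

// Mirrors the agentConfig assembly in $default.ts above:
const agentConfig = {
  ...DEFAULT_CONFIG,
  model: getModel(config.modelProvider, config.modelName, {
    ollamaBaseUrl: config.ollamaBaseUrl,
  }),
};
```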
diff --git a/packages/cli/tests/settings/config.test.ts b/packages/cli/tests/settings/config.test.ts
index 71ff3da..79d2280 100644
--- a/packages/cli/tests/settings/config.test.ts
+++ b/packages/cli/tests/settings/config.test.ts
@@ -43,6 +43,7 @@ describe('Config', () => {
       pageFilter: 'none',
       modelProvider: 'anthropic',
       modelName: 'claude-3-7-sonnet-20250219',
+      ollamaBaseUrl: 'http://localhost:11434/api',
     });
     expect(fs.existsSync).toHaveBeenCalledWith(mockConfigFile);
   });
@@ -74,6 +75,7 @@ describe('Config', () => {
       pageFilter: 'none',
       modelProvider: 'anthropic',
       modelName: 'claude-3-7-sonnet-20250219',
+      ollamaBaseUrl: 'http://localhost:11434/api',
     });
   });
 });
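The second patch below deletes the hardcoded `AVAILABLE_MODELS` table and the CLI's upfront model-name check, so any model name is forwarded to the provider and a bad name now fails at request time instead of at startup. A sketch of the failure mode callers should expect (the surrounding calls are illustrative; the error comes from the provider or SDK, not MyCoder):

```ts
import { generateText } from 'ai';
import { getModel } from 'mycoder-agent';

// Typo in the model name — nothing catches this at construction time anymore.
const model = getModel('ollama', 'llama3-groq-tool-usee');

try {
  await generateText({ model, prompt: 'hello' });
} catch (err) {
  // The Ollama server rejects the unknown model on the first request.
  console.error('Provider rejected the model:', err);
}
```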
From 5ef885987373747b57d85190149e8b40e5756ecc Mon Sep 17 00:00:00 2001
From: Ben Houston
Date: Tue, 4 Mar 2025 15:28:36 -0500
Subject: [PATCH 2/2] Remove hardcoded model list and validation (#85)

---
 packages/agent/src/core/toolAgent/config.ts |  9 ---------
 packages/cli/README.md                      | 22 +++++++--------------
 packages/cli/src/commands/$default.ts       | 10 ----------
 3 files changed, 7 insertions(+), 34 deletions(-)

diff --git a/packages/agent/src/core/toolAgent/config.ts b/packages/agent/src/core/toolAgent/config.ts
index 4ec3c2b..97f1408 100644
--- a/packages/agent/src/core/toolAgent/config.ts
+++ b/packages/agent/src/core/toolAgent/config.ts
@@ -9,15 +9,6 @@ import { createOllama, ollama } from 'ollama-ai-provider';
  */
 export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
 
-/**
- * Available models by provider
- */
-export const AVAILABLE_MODELS = {
-  anthropic: ['claude-3-7-sonnet-20250219', 'claude-3-opus-20240229'],
-  openai: ['gpt-4o-2024-05-13', 'o3-mini-2024-07-18'],
-  ollama: ['llama3-groq-tool-use'],
-};
-
 /**
  * Get the model instance based on provider and model name
  */
diff --git a/packages/cli/README.md b/packages/cli/README.md
index a1894fc..e3daaf7 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -82,28 +82,20 @@ mycoder config set modelName gpt-4o-2024-05-13
 
 ### Model Selection
 
-MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model to use with the following commands:
+MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:
 
 ```bash
-# Use OpenAI's GPT-4o model
+# Use OpenAI models
 mycoder config set modelProvider openai
-mycoder config set modelName gpt-4o-2024-05-13
-
-# Use OpenAI's o3-mini model
-mycoder config set modelProvider openai
-mycoder config set modelName o3-mini-2024-07-18
-
-# Use Anthropic's Claude 3.7 Sonnet model
-mycoder config set modelProvider anthropic
-mycoder config set modelName claude-3-7-sonnet-20250219
+mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
 
-# Use Anthropic's Claude 3 Opus model
+# Use Anthropic models
 mycoder config set modelProvider anthropic
-mycoder config set modelName claude-3-opus-20240229
+mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
 
-# Use Ollama's llama3-groq-tool-use model (local)
+# Use Ollama models (local)
 mycoder config set modelProvider ollama
-mycoder config set modelName llama3-groq-tool-use
+mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
 
 # Configure custom Ollama server URL (default is http://localhost:11434/api)
 mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts
index ade0941..6cf7740 100644
--- a/packages/cli/src/commands/$default.ts
+++ b/packages/cli/src/commands/$default.ts
@@ -12,7 +12,6 @@ import {
   subAgentTool,
   errorToString,
   getModel,
-  AVAILABLE_MODELS,
   DEFAULT_CONFIG,
 } from 'mycoder-agent';
 import { TokenTracker } from 'mycoder-agent/dist/core/tokens.js';
@@ -111,15 +110,6 @@ export const command: CommandModule = {
     }
     // No API key check needed for Ollama as it uses a local server
 
-    // Validate model name
-    if (!AVAILABLE_MODELS[userModelProvider].includes(userModelName)) {
-      logger.error(
-        `Invalid model name: ${userModelName} for provider ${userModelProvider}`,
-        `Available models for ${userModelProvider}: ${AVAILABLE_MODELS[userModelProvider].join(', ')}`,
-      );
-      throw new Error(`Invalid model name: ${userModelName}`);
-    }
-
     let prompt: string | undefined;
 
     // If promptFile is specified, read from file
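Taken together, the two patches leave one provider-specific rule in the CLI: API keys are demanded for the hosted providers only. A compact restatement of that policy (hypothetical helper for illustration; the real checks live inline in `$default.ts`):

```ts
type Provider = 'anthropic' | 'openai' | 'ollama';

// Returns the environment variable a provider requires, or undefined when no
// key is needed (Ollama talks to a local or self-hosted server).
function requiredApiKeyEnvVar(provider: Provider): string | undefined {
  switch (provider) {
    case 'anthropic':
      return 'ANTHROPIC_API_KEY';
    case 'openai':
      return 'OPENAI_API_KEY';
    case 'ollama':
      return undefined;
  }
}
```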