Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 14 additions & 10 deletions packages/agent/src/core/toolAgent/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,29 +2,33 @@ import { execSync } from 'child_process';

import { anthropic } from '@ai-sdk/anthropic';
import { openai } from '@ai-sdk/openai';
import { createOllama, ollama } from 'ollama-ai-provider';

/**
* Available model providers
*/
export type ModelProvider = 'anthropic' | 'openai';

/**
* Available models by provider
*/
// Allow-list of known model ids, keyed by provider name.
// NOTE(review): in this PR this constant is being DELETED (the name-validation
// check in packages/cli/src/commands/$default.ts is removed alongside it) so
// that any model name can be passed through to the provider — including
// arbitrary locally-available Ollama models.
export const AVAILABLE_MODELS = {
anthropic: ['claude-3-7-sonnet-20250219', 'claude-3-opus-20240229'],
openai: ['gpt-4o-2024-05-13', 'o3-mini-2024-07-18'],
};
export type ModelProvider = 'anthropic' | 'openai' | 'ollama';

/**
* Get the model instance based on provider and model name
*/
export function getModel(provider: ModelProvider, modelName: string) {
export function getModel(
provider: ModelProvider,
modelName: string,
options?: { ollamaBaseUrl?: string },
) {
switch (provider) {
case 'anthropic':
return anthropic(modelName);
case 'openai':
return openai(modelName);
case 'ollama':
if (options?.ollamaBaseUrl) {
return createOllama({
baseURL: options.ollamaBaseUrl,
})(modelName);
}
return ollama(modelName);
default:
throw new Error(`Unknown model provider: ${provider}`);
}
Expand Down
53 changes: 20 additions & 33 deletions packages/agent/src/tools/interaction/subAgent.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';

import { getDefaultSystemPrompt } from '../../core/toolAgent/index.js';
import { getModel } from '../../core/toolAgent/config.js';
import { getDefaultSystemPrompt } from '../../core/toolAgent/index.js';
import { toolAgent } from '../../core/toolAgent.js';
import { Tool, ToolContext } from '../../core/types.js';
import { getTools } from '../getTools.js';
Expand All @@ -18,23 +18,14 @@ const parameterSchema = z.object({
projectContext: z
.string()
.describe('Context about the problem or environment'),
fileContext: z
.object({
workingDirectory: z
.string()
.optional()
.describe('The directory where the sub-agent should operate'),
relevantFiles: z
.string()
.optional()
.describe(
'A list of files, which may include ** or * wildcard characters',
),
})
.describe(
'When working with files and directories, it is best to be very specific to avoid sub-agents making incorrect assumptions',
)
.optional(),
workingDirectory: z
.string()
.optional()
.describe('The directory where the sub-agent should operate'),
relevantFilesDirectories: z
.string()
.optional()
.describe('A list of files, which may include ** or * wildcard characters'),
});

const returnSchema = z.object({
Expand Down Expand Up @@ -77,25 +68,22 @@ export const subAgentTool: Tool<Parameters, ReturnType> = {
returnsJsonSchema: zodToJsonSchema(returnSchema),
execute: async (params, context) => {
// Validate parameters
const { description, goal, projectContext, fileContext } =
parameterSchema.parse(params);
const {
description,
goal,
projectContext,
workingDirectory,
relevantFilesDirectories,
} = parameterSchema.parse(params);

// Construct a well-structured prompt
const prompt = [
`Description: ${description}`,
`Goal: ${goal}`,
`Project Context: ${projectContext}`,
fileContext
? `\nContext:\n${[
fileContext.workingDirectory
? `- Working Directory: ${fileContext.workingDirectory}`
: '',
fileContext.relevantFiles
? `- Relevant Files:\n ${fileContext.relevantFiles}`
: '',
]
.filter(Boolean)
.join('\n')}`
workingDirectory ? `Working Directory: ${workingDirectory}` : '',
relevantFilesDirectories
? `Relevant Files:\n ${relevantFilesDirectories}`
: '',
]
.filter(Boolean)
Expand All @@ -110,8 +98,7 @@ export const subAgentTool: Tool<Parameters, ReturnType> = {

const result = await toolAgent(prompt, tools, config, {
...context,
workingDirectory:
fileContext?.workingDirectory ?? context.workingDirectory,
workingDirectory: workingDirectory ?? context.workingDirectory,
});
return { response: result.result };
},
Expand Down
31 changes: 18 additions & 13 deletions packages/cli/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ Command-line interface for AI-powered coding tasks.

## Features

- 🤖 **AI-Powered**: Leverages Anthropic's Claude and OpenAI models for intelligent coding assistance
- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, and Ollama for intelligent coding assistance
- 🛠️ **Extensible Tool System**: Modular architecture with various tool categories
- 🔄 **Parallel Execution**: Ability to spawn sub-agents for concurrent task processing
- 📝 **Self-Modification**: Can modify code, it was built and tested by writing itself
Expand Down Expand Up @@ -82,24 +82,23 @@ mycoder config set modelName gpt-4o-2024-05-13

### Model Selection

MyCoder supports both Anthropic and OpenAI models. You can configure which model to use with the following commands:
MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:

```bash
# Use OpenAI's GPT-4o model
# Use OpenAI models
mycoder config set modelProvider openai
mycoder config set modelName gpt-4o-2024-05-13

# Use OpenAI's o3-mini model
mycoder config set modelProvider openai
mycoder config set modelName o3-mini-2024-07-18
mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model

# Use Anthropic's Claude 3.7 Sonnet model
# Use Anthropic models
mycoder config set modelProvider anthropic
mycoder config set modelName claude-3-7-sonnet-20250219
mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model

# Use Anthropic's Claude 3 Opus model
mycoder config set modelProvider anthropic
mycoder config set modelName claude-3-opus-20240229
# Use Ollama models (local)
mycoder config set modelProvider ollama
mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance

# Configure custom Ollama server URL (default is http://localhost:11434/api)
mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
```

You can also specify the model provider and name directly when running a command:
Expand All @@ -114,6 +113,7 @@ mycoder --modelProvider openai --modelName gpt-4o-2024-05-13 "Your prompt here"
- `headless`: Run browser in headless mode with no UI showing (default: `true`)
- `userSession`: Use user's existing browser session instead of sandboxed session (default: `false`)
- `pageFilter`: Method to process webpage content: 'simple', 'none', or 'readability' (default: `none`)
- `ollamaBaseUrl`: Base URL for Ollama API (default: `http://localhost:11434/api`)

Example:

Expand All @@ -126,13 +126,18 @@ mycoder config set userSession true

# Use readability for webpage processing
mycoder config set pageFilter readability

# Set custom Ollama server URL
mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
```

## Environment Variables

- `ANTHROPIC_API_KEY`: Your Anthropic API key (required when using Anthropic models)
- `OPENAI_API_KEY`: Your OpenAI API key (required when using OpenAI models)

Note: Ollama models do not require an API key as they run locally or on a specified server.

## Development

```bash
Expand Down
14 changes: 3 additions & 11 deletions packages/cli/src/commands/$default.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import {
subAgentTool,
errorToString,
getModel,
AVAILABLE_MODELS,
DEFAULT_CONFIG,
} from 'mycoder-agent';
import { TokenTracker } from 'mycoder-agent/dist/core/tokens.js';
Expand Down Expand Up @@ -109,15 +108,7 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
);
throw new Error('OpenAI API key not found');
}

// Validate model name
if (!AVAILABLE_MODELS[userModelProvider].includes(userModelName)) {
logger.error(
`Invalid model name: ${userModelName} for provider ${userModelProvider}`,
`Available models for ${userModelProvider}: ${AVAILABLE_MODELS[userModelProvider].join(', ')}`,
);
throw new Error(`Invalid model name: ${userModelName}`);
}
// No API key check needed for Ollama as it uses a local server

let prompt: string | undefined;

Expand Down Expand Up @@ -166,8 +157,9 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
const agentConfig = {
...DEFAULT_CONFIG,
model: getModel(
userModelProvider as 'anthropic' | 'openai',
userModelProvider as 'anthropic' | 'openai' | 'ollama',
userModelName,
{ ollamaBaseUrl: config.ollamaBaseUrl },
),
};

Expand Down
1 change: 1 addition & 0 deletions packages/cli/src/settings/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ const defaultConfig = {
pageFilter: 'none' as 'simple' | 'none' | 'readability',
modelProvider: 'anthropic',
modelName: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
};

export type Config = typeof defaultConfig;
Expand Down
2 changes: 2 additions & 0 deletions packages/cli/tests/settings/config.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ describe('Config', () => {
pageFilter: 'none',
modelProvider: 'anthropic',
modelName: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});
expect(fs.existsSync).toHaveBeenCalledWith(mockConfigFile);
});
Expand Down Expand Up @@ -74,6 +75,7 @@ describe('Config', () => {
pageFilter: 'none',
modelProvider: 'anthropic',
modelName: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});
});
});
Expand Down
Loading