diff --git a/.gitignore b/.gitignore index 3fa4cdb..7258887 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ test/ build/ scripts/ dist/ +contribute/output/ diff --git a/README.md b/README.md index 2b01d3c..0f7ab2f 100644 --- a/README.md +++ b/README.md @@ -17,10 +17,12 @@ This architecture allows language models to: Currently supports: - Claude 3.5 Sonnet (claude-3-5-sonnet-20240620) - Any Ollama-compatible model with function calling support +- Any OpenAI-compatible local or online model with function calling support + ## Features ✨ -- Interactive conversations with either Claude 3.5 Sonnet or Ollama models +- Interactive conversations with supported models - Support for multiple concurrent MCP servers - Dynamic tool discovery and integration - Tool calling capabilities for both model types @@ -53,6 +55,10 @@ ollama pull mistral ollama serve ``` +3. OpenAI-compatible online setup +- Get your API server base URL, API key, and model name + + ## Installation 📦 ```bash @@ -99,7 +105,7 @@ MCPHost is a CLI tool that allows you to interact with various AI models through ### Available Models Models can be specified using the `--model` (`-m`) flag: - Anthropic Claude (default): `anthropic:claude-3-5-sonnet-latest` -- OpenAI: `openai:gpt-4` +- OpenAI or OpenAI-compatible: `openai:gpt-4` - Ollama models: `ollama:modelname` ### Examples @@ -109,6 +115,11 @@ mcphost -m ollama:qwen2.5:3b # Use OpenAI's GPT-4 mcphost -m openai:gpt-4 + +# Use OpenAI-compatible model +mcphost --model openai: \ +--openai-url \ +--openai-api-key ``` ### Flags diff --git a/contribute/boost.sh b/contribute/boost.sh new file mode 100755 index 0000000..1f0775b --- /dev/null +++ b/contribute/boost.sh @@ -0,0 +1,4 @@ +./output/mcphost --model openai: \ +--openai-url \ +--openai-api-key \ +--config ./conf/demo.json --debug \ No newline at end of file diff --git a/contribute/build.sh b/contribute/build.sh new file mode 100755 index 0000000..699ad26 --- /dev/null +++ b/contribute/build.sh @@ -0,0 +1,6 
@@ +#!/bin/bash + +RUN_NAME="mcphost" + +mkdir -p output +go build -o output/${RUN_NAME} \ No newline at end of file diff --git a/contribute/conf/demo.json b/contribute/conf/demo.json new file mode 100644 index 0000000..20745e2 --- /dev/null +++ b/contribute/conf/demo.json @@ -0,0 +1,22 @@ +{ + "mcpServers": { + "filesystem": { + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-filesystem", + "./" + ] + }, + "weather": { + "command": "uv", + "args": [ + "--directory", + "/Users/bytedance/code/MCP/my_host/weather", + "run", + "weather.py" + ] + } + } +} + diff --git a/contribute/contribute.md b/contribute/contribute.md new file mode 100644 index 0000000..4ad6dc2 --- /dev/null +++ b/contribute/contribute.md @@ -0,0 +1,31 @@ +# Contribute README +Thanks for your contribution, you can follow these steps to run this repo and debug it. +## Run demo +1. clone this repo to your work dir. + ```bash + git clone https://github.com/mark3labs/mcphost.git + ``` + +2. enter the `contribute` dir. + ```bash + cd mcphost/contribute + ``` + +3. run `build.sh` to build your binary file. + ```bash + ./build.sh + ``` + +4. open `boost.sh` file and fill your model info in. + ```bash + cat boost.sh + vi boost.sh + ``` + +5. run `boost.sh` to run your mcphost, if you don't want to run it in debug mode, you can delete the `--debug` flag in `boost.sh`. + ```bash + ./boost.sh + ``` + +## Contribute your code +Just write your code and push it. 
diff --git a/pkg/llm/openai/client.go b/pkg/llm/openai/client.go index f6790c3..88c2d0d 100644 --- a/pkg/llm/openai/client.go +++ b/pkg/llm/openai/client.go @@ -6,7 +6,6 @@ import ( "encoding/json" "fmt" "net/http" - "strings" ) type Client struct { @@ -18,8 +17,6 @@ type Client struct { func NewClient(apiKey string, baseURL string) *Client { if baseURL == "" { baseURL = "https://api.openai.com/v1" - } else if !strings.HasSuffix(baseURL, "/v1") { - baseURL = strings.TrimSuffix(baseURL, "/") + "/v1" } return &Client{ apiKey: apiKey, diff --git a/pkg/llm/openai/types.go b/pkg/llm/openai/types.go index 5b8ce5d..cdc4cad 100644 --- a/pkg/llm/openai/types.go +++ b/pkg/llm/openai/types.go @@ -9,12 +9,13 @@ type CreateRequest struct { } type MessageParam struct { - Role string `json:"role"` - Content *string `json:"content"` - FunctionCall *FunctionCall `json:"function_call,omitempty"` - ToolCalls []ToolCall `json:"tool_calls,omitempty"` - Name string `json:"name,omitempty"` - ToolCallID string `json:"tool_call_id,omitempty"` + Role string `json:"role"` + Content *string `json:"content"` + ReasoningContent *string `json:"reasoning_content"` + FunctionCall *FunctionCall `json:"function_call,omitempty"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + Name string `json:"name,omitempty"` + ToolCallID string `json:"tool_call_id,omitempty"` } type ToolCall struct {