Skip to content

Commit 8511eca

Browse files
fix(llm): consume llm base url config in a better way (mem0ai#1861)
1 parent 56ceecb commit 8511eca

File tree

5 files changed

+40
-6
lines changed

5 files changed

+40
-6
lines changed

docs/components/llms/config.mdx

+10
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,16 @@ The config is defined as a Python dictionary with two main keys:
99
- `provider`: The name of the llm (e.g., "openai", "groq")
1010
- `config`: A nested dictionary containing provider-specific settings
1111

12+
### Config Values Precedence
13+
14+
Config values are applied in the following order of precedence (from highest to lowest):
15+
16+
1. Values explicitly set in the `config` dictionary
17+
2. Environment variables (e.g., `OPENAI_API_KEY`, `OPENAI_API_BASE`)
18+
3. Default values defined in the LLM implementation
19+
20+
This means that values specified in the `config` dictionary will override the corresponding environment variables, which in turn override the default values.
21+
1222
## How to Use Config
1323

1424
Here's a general example of how to use the config with mem0:

mem0/configs/llms/base.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,9 @@ def __init__(
2222
# Openrouter specific
2323
models: Optional[list[str]] = None,
2424
route: Optional[str] = "fallback",
25-
openrouter_base_url: Optional[str] = "https://openrouter.ai/api/v1",
25+
openrouter_base_url: Optional[str] = None,
2626
# Openai specific
27-
openai_base_url: Optional[str] = "https://api.openai.com/v1",
27+
openai_base_url: Optional[str] = None,
2828
site_url: Optional[str] = None,
2929
app_name: Optional[str] = None,
3030
# Ollama specific

mem0/llms/openai.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,11 @@ def __init__(self, config: Optional[BaseLlmConfig] = None):
1818
if os.environ.get("OPENROUTER_API_KEY"): # Use OpenRouter
1919
self.client = OpenAI(
2020
api_key=os.environ.get("OPENROUTER_API_KEY"),
21-
base_url=self.config.openrouter_base_url,
21+
base_url=self.config.openrouter_base_url or os.getenv("OPENROUTER_API_BASE") or "https://openrouter.ai/api/v1",
2222
)
2323
else:
2424
api_key = self.config.api_key or os.getenv("OPENAI_API_KEY")
25-
base_url = os.getenv("OPENAI_API_BASE") or self.config.openai_base_url
25+
base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
2626
self.client = OpenAI(api_key=api_key, base_url=base_url)
2727

2828
def _parse_response(self, response, tools):

mem0/llms/openai_structured.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def __init__(self, config: Optional[BaseLlmConfig] = None):
1616
self.config.model = "gpt-4o-2024-08-06"
1717

1818
api_key = self.config.api_key or os.getenv("OPENAI_API_KEY")
19-
base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE")
19+
base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
2020
self.client = OpenAI(api_key=api_key, base_url=base_url)
2121

2222
def _parse_response(self, response, tools):

tests/llms/test_openai.py

+25-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from unittest.mock import Mock, patch
2-
2+
import os
33
import pytest
44

55
from mem0.configs.llms.base import BaseLlmConfig
@@ -14,6 +14,30 @@ def mock_openai_client():
1414
yield mock_client
1515

1616

17+
def test_openai_llm_base_url():
18+
# case1: default config: with openai official base url
19+
config = BaseLlmConfig(model="gpt-4o", temperature=0.7, max_tokens=100, top_p=1.0, api_key="api_key")
20+
llm = OpenAILLM(config)
21+
# Note: openai client will parse the raw base_url into a URL object, which will have a trailing slash
22+
assert str(llm.client.base_url) == "https://api.openai.com/v1/"
23+
24+
# case2: with env variable OPENAI_API_BASE
25+
provider_base_url = "https://api.provider.com/v1"
26+
os.environ["OPENAI_API_BASE"] = provider_base_url
27+
config = BaseLlmConfig(model="gpt-4o", temperature=0.7, max_tokens=100, top_p=1.0, api_key="api_key")
28+
llm = OpenAILLM(config)
29+
# Note: openai client will parse the raw base_url into a URL object, which will have a trailing slash
30+
assert str(llm.client.base_url) == provider_base_url + "/"
31+
32+
# case3: with config.openai_base_url
33+
config_base_url = "https://api.config.com/v1"
34+
config = BaseLlmConfig(model="gpt-4o", temperature=0.7, max_tokens=100, top_p=1.0, api_key="api_key",
35+
openai_base_url=config_base_url)
36+
llm = OpenAILLM(config)
37+
# Note: openai client will parse the raw base_url into a URL object, which will have a trailing slash
38+
assert str(llm.client.base_url) == config_base_url + "/"
39+
40+
1741
def test_generate_response_without_tools(mock_openai_client):
1842
config = BaseLlmConfig(model="gpt-4o", temperature=0.7, max_tokens=100, top_p=1.0)
1943
llm = OpenAILLM(config)

0 commit comments

Comments
 (0)