@@ -31,7 +31,7 @@ class ModelFallbackMiddleware(AgentMiddleware):

fallback = ModelFallbackMiddleware(
"openai:gpt-4o-mini", # Try first on error
"anthropic:claude-3-5-sonnet-20241022", # Then this
"anthropic:claude-sonnet-4-5-20250929", # Then this
)

agent = create_agent(
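For orientation, a minimal sketch of wiring this fallback chain into an agent. The import paths, `create_agent` signature, and invocation format are assumptions based on the docstring above, not part of this diff:

```python
# Sketch only: import paths and the create_agent signature are assumed
# from the docstring context above, not verified against this PR.
from langchain.agents import create_agent
from langchain.agents.middleware import ModelFallbackMiddleware

fallback = ModelFallbackMiddleware(
    "openai:gpt-4o-mini",  # Tried first when the primary model errors
    "anthropic:claude-sonnet-4-5-20250929",  # Tried next
)

agent = create_agent(
    model="openai:gpt-4o",  # Primary model; fallbacks engage on error
    middleware=[fallback],
)
result = agent.invoke({"messages": [{"role": "user", "content": "hello"}]})
```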
@@ -47,9 +47,7 @@ class LLMToolEmulator(AgentMiddleware):

Use a custom model for emulation:
```python
- middleware = LLMToolEmulator(
-     tools=["get_weather"], model="anthropic:claude-3-5-sonnet-latest"
- )
+ middleware = LLMToolEmulator(tools=["get_weather"], model="anthropic:claude-sonnet-4-5")
```

Emulate specific tools by passing tool instances:
@@ -71,7 +69,7 @@ def __init__(
If None (default), ALL tools will be emulated.
If empty list, no tools will be emulated.
model: Model to use for emulation.
Defaults to "anthropic:claude-3-5-sonnet-latest".
Defaults to "anthropic:claude-sonnet-4-5".
Can be a model identifier string or BaseChatModel instance.
"""
super().__init__()
@@ -91,7 +89,7 @@ def __init__(

# Initialize emulator model
if model is None:
self.model = init_chat_model("anthropic:claude-3-5-sonnet-latest", temperature=1)
self.model = init_chat_model("anthropic:claude-sonnet-4-5", temperature=1)
elif isinstance(model, BaseChatModel):
self.model = model
else:
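Taken together, the changes above only retarget the emulator's default model. A short sketch of both constructor forms, assuming the same middleware package as the class above; only the defaults shown are from this diff:

```python
# Sketch: both constructor forms from the updated docstring/__init__.
# The import path is an assumption.
from langchain.agents.middleware import LLMToolEmulator

# Default: emulates ALL tools using "anthropic:claude-sonnet-4-5".
emulate_all = LLMToolEmulator()

# Emulate only selected tools, with an explicit emulator model.
emulate_some = LLMToolEmulator(
    tools=["get_weather"], model="anthropic:claude-sonnet-4-5"
)
```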
@@ -17,7 +17,7 @@ def _get_model(provider: str) -> Any:
if provider == "anthropic":
from langchain_anthropic import ChatAnthropic

return ChatAnthropic(model="claude-3-5-sonnet-20241022")
return ChatAnthropic(model="claude-sonnet-4-5-20250929")
elif provider == "openai":
from langchain_openai import ChatOpenAI

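The `_get_model` helper above is a small provider-string dispatch; a hedged usage sketch (only the `anthropic` branch's return value is confirmed by this diff):

```python
# Usage sketch for the provider dispatch above. Providers beyond
# "anthropic" and "openai" are not visible in this hunk.
model = _get_model("anthropic")  # ChatAnthropic(model="claude-sonnet-4-5-20250929")
reply = model.invoke("Reply with one word: hello")
```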
@@ -369,9 +369,7 @@ def test_custom_model_string(self) -> None:
"""Test passing a model string for emulation."""
# Just test that initialization works - don't require anthropic package
try:
- emulator = LLMToolEmulator(
-     tools=["get_weather"], model="anthropic:claude-3-5-sonnet-latest"
- )
+ emulator = LLMToolEmulator(tools=["get_weather"], model="anthropic:claude-sonnet-4-5")
assert emulator.model is not None
assert "get_weather" in emulator.tools_to_emulate
except ImportError:
28 changes: 14 additions & 14 deletions libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -820,7 +820,7 @@ class Joke(BaseModel):
image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8")

model = ChatAnthropic(model="claude-3-5-sonnet-latest")
model = ChatAnthropic(model="claude-sonnet-4-5")
message = HumanMessage(
content=[
{
@@ -887,7 +887,7 @@ class Joke(BaseModel):
url = "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf"
data = b64encode(requests.get(url).content).decode()

model = ChatAnthropic(model="claude-3-5-sonnet-latest")
model = ChatAnthropic(model="claude-sonnet-4-5")
ai_msg = model.invoke(
[
HumanMessage(
@@ -1948,7 +1948,7 @@ class GetPrice(BaseModel):
product: str = Field(..., description="The product to look up.")


model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
model_with_tools = model.bind_tools([GetWeather, GetPrice])
model_with_tools.invoke(
"What is the weather like in San Francisco",
@@ -1958,7 +1958,7 @@ class GetPrice(BaseModel):
# {'text': '<thinking>\nBased on the user\'s question, the relevant function to call is GetWeather, which requires the "location" parameter.\n\nThe user has directly specified the location as "San Francisco". Since San Francisco is a well known city, I can reasonably infer they mean San Francisco, CA without needing the state specified.\n\nAll the required parameters are provided, so I can proceed with the API call.\n</thinking>', 'type': 'text'},
# {'text': None, 'type': 'tool_use', 'id': 'toolu_01SCgExKzQ7eqSkMHfygvYuu', 'name': 'GetWeather', 'input': {'location': 'San Francisco, CA'}}
# ],
- # response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-3-5-sonnet-latest', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}},
+ # response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-sonnet-4-5', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}},
# id='run-87b1331e-9251-4a68-acef-f0a018b639cc-0'
# )
```
@@ -1982,7 +1982,7 @@ class GetPrice(BaseModel):
product: str = Field(..., description="The product to look up.")


model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="any")
model_with_tools.invoke(
"what is the weather like in San Francisco",
@@ -2008,7 +2008,7 @@ class GetPrice(BaseModel):
product: str = Field(..., description="The product to look up.")


model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="GetWeather")
model_with_tools.invoke("What is the weather like in San Francisco")
```
@@ -2043,7 +2043,7 @@ class GetPrice(BaseModel):
# We need to pass in extra headers to enable use of the beta cache
# control API.
model = ChatAnthropic(
model="claude-3-5-sonnet-latest",
model="claude-sonnet-4-5",
temperature=0,
)
model_with_tools = model.bind_tools([GetWeather, cached_price_tool])
@@ -2068,7 +2068,7 @@ class GetPrice(BaseModel):
],
response_metadata={
"id": "msg_01Xg7Wr5inFWgBxE5jH9rpRo",
"model": "claude-3-5-sonnet-latest",
"model": "claude-sonnet-4-5",
"stop_reason": "tool_use",
"stop_sequence": None,
"usage": {
@@ -2113,7 +2113,7 @@ class GetPrice(BaseModel):
],
response_metadata={
"id": "msg_016RfWHrRvW6DAGCdwB6Ac64",
"model": "claude-3-5-sonnet-latest",
"model": "claude-sonnet-4-5",
"stop_reason": "tool_use",
"stop_sequence": None,
"usage": {
@@ -2240,7 +2240,7 @@ class AnswerWithJustification(BaseModel):
justification: str


model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
structured_model = model.with_structured_output(AnswerWithJustification)

structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2265,7 +2265,7 @@ class AnswerWithJustification(BaseModel):
justification: str


model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
structured_model = model.with_structured_output(AnswerWithJustification, include_raw=True)

structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2293,7 +2293,7 @@ class AnswerWithJustification(BaseModel):
"required": ["answer", "justification"],
},
}
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
structured_model = model.with_structured_output(schema)

structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2365,7 +2365,7 @@ def get_num_tokens_from_messages(
from langchain_anthropic import ChatAnthropic
from langchain_core.messages import HumanMessage, SystemMessage

model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
model = ChatAnthropic(model="claude-sonnet-4-5-20250929")

messages = [
SystemMessage(content="You are a scientist"),
@@ -2385,7 +2385,7 @@ def get_num_tokens_from_messages(
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool

model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
model = ChatAnthropic(model="claude-sonnet-4-5-20250929")

@tool(parse_docstring=True)
def get_weather(location: str) -> str:
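The docstring updates above all swap the model id without touching behavior. As a runnable cross-check, a minimal structured-output call mirroring the docstring example above (the schema is the docstring's own; needs `ANTHROPIC_API_KEY` set to actually run):

```python
# Mirrors the with_structured_output docstring example above with the
# updated model id; requires ANTHROPIC_API_KEY to execute.
from pydantic import BaseModel
from langchain_anthropic import ChatAnthropic


class AnswerWithJustification(BaseModel):
    answer: str
    justification: str


model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
structured_model = model.with_structured_output(AnswerWithJustification)
result = structured_model.invoke(
    "What weighs more, a pound of bricks or a pound of feathers?"
)
```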
2 changes: 1 addition & 1 deletion libs/partners/anthropic/langchain_anthropic/llms.py
@@ -25,7 +25,7 @@
class _AnthropicCommon(BaseLanguageModel):
client: Any = None #: :meta private:
async_client: Any = None #: :meta private:
model: str = Field(default="claude-3-5-sonnet-latest", alias="model_name")
model: str = Field(default="claude-sonnet-4-5", alias="model_name")
"""Model name to use."""

max_tokens: int = Field(default=1024, alias="max_tokens_to_sample")
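Because `_AnthropicCommon` supplies the `model` field, the new default should propagate to its subclasses. A sketch, assuming the public `AnthropicLLM` class inherits the field unchanged (not shown in this diff):

```python
# Assumption: AnthropicLLM subclasses _AnthropicCommon and picks up the
# new default model name from the one-line change above.
from langchain_anthropic import AnthropicLLM

llm = AnthropicLLM(anthropic_api_key="test")
assert llm.model == "claude-sonnet-4-5"
```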
@@ -588,7 +588,7 @@ class GenerateUsername(BaseModel):


def test_disable_parallel_tool_calling() -> None:
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg]
llm = ChatAnthropic(model="claude-sonnet-4-5-20250929") # type: ignore[call-arg]
llm_with_tools = llm.bind_tools([GenerateUsername], parallel_tool_calls=False)
result = llm_with_tools.invoke(
"Use the GenerateUsername tool to generate user names for:\n\n"
@@ -665,7 +665,7 @@ def test_with_structured_output() -> None:


def test_get_num_tokens_from_messages() -> None:
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg]
llm = ChatAnthropic(model="claude-sonnet-4-5-20250929") # type: ignore[call-arg]

# Test simple case
messages = [
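The first hunk above retargets `test_disable_parallel_tool_calling`. A condensed sketch of the behavior under test; the `GenerateUsername` field names below are placeholders, since the real schema's body is not shown in this diff, and the single-call assertion is my reading of the test name:

```python
# Condensed from test_disable_parallel_tool_calling above; hitting the
# API requires ANTHROPIC_API_KEY. Field names are stand-ins.
from pydantic import BaseModel
from langchain_anthropic import ChatAnthropic


class GenerateUsername(BaseModel):
    """Stand-in for the test file's tool schema."""

    name: str
    hair_color: str


llm = ChatAnthropic(model="claude-sonnet-4-5-20250929")
llm_with_tools = llm.bind_tools([GenerateUsername], parallel_tool_calls=False)
msg = llm_with_tools.invoke(
    "Use the GenerateUsername tool to generate user names for:\n\n"
    "Sally with green hair\nBob with blue hair"
)
assert len(msg.tool_calls) == 1  # one call per turn with parallel calls disabled
```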
34 changes: 12 additions & 22 deletions libs/partners/anthropic/tests/unit_tests/test_chat_models.py
@@ -55,17 +55,17 @@ def test_streaming_attribute_should_stream(async_api: bool) -> None: # noqa: FB

def test_anthropic_client_caching() -> None:
"""Test that the OpenAI client is cached."""
llm1 = ChatAnthropic(model="claude-3-5-sonnet-latest")
llm2 = ChatAnthropic(model="claude-3-5-sonnet-latest")
llm1 = ChatAnthropic(model="claude-sonnet-4-5")
llm2 = ChatAnthropic(model="claude-sonnet-4-5")
assert llm1._client._client is llm2._client._client

llm3 = ChatAnthropic(model="claude-3-5-sonnet-latest", base_url="foo")
llm3 = ChatAnthropic(model="claude-sonnet-4-5", base_url="foo")
assert llm1._client._client is not llm3._client._client

llm4 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=None)
llm4 = ChatAnthropic(model="claude-sonnet-4-5", timeout=None)
assert llm1._client._client is llm4._client._client

llm5 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=3)
llm5 = ChatAnthropic(model="claude-sonnet-4-5", timeout=3)
assert llm1._client._client is not llm5._client._client


@@ -74,9 +74,7 @@ def test_anthropic_proxy_support() -> None:
proxy_url = "http://proxy.example.com:8080"

# Test sync client with proxy
- llm_sync = ChatAnthropic(
-     model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url
- )
+ llm_sync = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=proxy_url)
sync_client = llm_sync._client
assert sync_client is not None

Expand All @@ -85,10 +83,8 @@ def test_anthropic_proxy_support() -> None:
assert async_client is not None

# Test that clients with different proxy settings are not cached together
llm_no_proxy = ChatAnthropic(model="claude-3-5-sonnet-latest")
llm_with_proxy = ChatAnthropic(
model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url
)
llm_no_proxy = ChatAnthropic(model="claude-sonnet-4-5")
llm_with_proxy = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=proxy_url)

# Different proxy settings should result in different cached clients
assert llm_no_proxy._client._client is not llm_with_proxy._client._client
@@ -100,7 +96,7 @@ def test_anthropic_proxy_from_environment() -> None:

# Test with environment variable set
with patch.dict(os.environ, {"ANTHROPIC_PROXY": proxy_url}):
llm = ChatAnthropic(model="claude-3-5-sonnet-latest")
llm = ChatAnthropic(model="claude-sonnet-4-5")
assert llm.anthropic_proxy == proxy_url

# Should be able to create clients successfully
Expand All @@ -112,9 +108,7 @@ def test_anthropic_proxy_from_environment() -> None:
# Test that explicit parameter overrides environment variable
with patch.dict(os.environ, {"ANTHROPIC_PROXY": "http://env-proxy.com"}):
explicit_proxy = "http://explicit-proxy.com"
- llm = ChatAnthropic(
-     model="claude-3-5-sonnet-latest", anthropic_proxy=explicit_proxy
- )
+ llm = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=explicit_proxy)
assert llm.anthropic_proxy == explicit_proxy


@@ -132,10 +126,6 @@ def test_set_default_max_tokens() -> None:
llm = ChatAnthropic(model="claude-3-7-sonnet-latest", anthropic_api_key="test")
assert llm.max_tokens == 64000

- # Test claude-3-5-sonnet models
- llm = ChatAnthropic(model="claude-3-5-sonnet-latest", anthropic_api_key="test")
- assert llm.max_tokens == 8192
-
# Test claude-3-5-haiku models
llm = ChatAnthropic(model="claude-3-5-haiku-latest", anthropic_api_key="test")
assert llm.max_tokens == 8192
Expand All @@ -146,13 +136,13 @@ def test_set_default_max_tokens() -> None:

# Test that existing max_tokens values are preserved
llm = ChatAnthropic(
model="claude-3-5-sonnet-latest", max_tokens=2048, anthropic_api_key="test"
model="claude-sonnet-4-5", max_tokens=2048, anthropic_api_key="test"
)
assert llm.max_tokens == 2048

# Test that explicitly set max_tokens values are preserved
llm = ChatAnthropic(
model="claude-3-5-sonnet-latest", max_tokens=4096, anthropic_api_key="test"
model="claude-sonnet-4-5", max_tokens=4096, anthropic_api_key="test"
)
assert llm.max_tokens == 4096

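One behavioral note on the hunk above: the dedicated `claude-3-5-sonnet` default-`max_tokens` case is dropped along with the model, while the explicit-override cases are retargeted. A condensed sketch of the invariant the remaining assertions pin down:

```python
# Invariant kept by the updated test: an explicitly set max_tokens always
# overrides the per-model default, regardless of model id.
from langchain_anthropic import ChatAnthropic

llm = ChatAnthropic(
    model="claude-sonnet-4-5", max_tokens=2048, anthropic_api_key="test"
)
assert llm.max_tokens == 2048
```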