Commit fabcacc (parent: ac58d75)

File tree: 8 files changed (+35 / −49 lines)

libs/langchain_v1/langchain/agents/middleware/model_fallback.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ class ModelFallbackMiddleware(AgentMiddleware):
3131
3232
fallback = ModelFallbackMiddleware(
3333
"openai:gpt-4o-mini", # Try first on error
34-
"anthropic:claude-3-5-sonnet-20241022", # Then this
34+
"anthropic:claude-sonnet-4-5-20250929", # Then this
3535
)
3636
3737
agent = create_agent(

libs/langchain_v1/langchain/agents/middleware/tool_emulator.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -47,9 +47,7 @@ class LLMToolEmulator(AgentMiddleware):
4747
4848
Use a custom model for emulation:
4949
```python
50-
middleware = LLMToolEmulator(
51-
tools=["get_weather"], model="anthropic:claude-3-5-sonnet-latest"
52-
)
50+
middleware = LLMToolEmulator(tools=["get_weather"], model="anthropic:claude-sonnet-4-5")
5351
```
5452
5553
Emulate specific tools by passing tool instances:
@@ -71,7 +69,7 @@ def __init__(
7169
If None (default), ALL tools will be emulated.
7270
If empty list, no tools will be emulated.
7371
model: Model to use for emulation.
74-
Defaults to "anthropic:claude-3-5-sonnet-latest".
72+
Defaults to "anthropic:claude-sonnet-4-5".
7573
Can be a model identifier string or BaseChatModel instance.
7674
"""
7775
super().__init__()
@@ -91,7 +89,7 @@ def __init__(
9189

9290
# Initialize emulator model
9391
if model is None:
94-
self.model = init_chat_model("anthropic:claude-3-5-sonnet-latest", temperature=1)
92+
self.model = init_chat_model("anthropic:claude-sonnet-4-5", temperature=1)
9593
elif isinstance(model, BaseChatModel):
9694
self.model = model
9795
else:

libs/langchain_v1/tests/integration_tests/agents/middleware/test_shell_tool_integration.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ def _get_model(provider: str) -> Any:
1717
if provider == "anthropic":
1818
from langchain_anthropic import ChatAnthropic
1919

20-
return ChatAnthropic(model="claude-3-5-sonnet-20241022")
20+
return ChatAnthropic(model="claude-sonnet-4-5-20250929")
2121
elif provider == "openai":
2222
from langchain_openai import ChatOpenAI
2323

libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_emulator.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -369,9 +369,7 @@ def test_custom_model_string(self) -> None:
369369
"""Test passing a model string for emulation."""
370370
# Just test that initialization works - don't require anthropic package
371371
try:
372-
emulator = LLMToolEmulator(
373-
tools=["get_weather"], model="anthropic:claude-3-5-sonnet-latest"
374-
)
372+
emulator = LLMToolEmulator(tools=["get_weather"], model="anthropic:claude-sonnet-4-5")
375373
assert emulator.model is not None
376374
assert "get_weather" in emulator.tools_to_emulate
377375
except ImportError:

libs/partners/anthropic/langchain_anthropic/chat_models.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -820,7 +820,7 @@ class Joke(BaseModel):
820820
image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
821821
image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8")
822822
823-
model = ChatAnthropic(model="claude-3-5-sonnet-latest")
823+
model = ChatAnthropic(model="claude-sonnet-4-5")
824824
message = HumanMessage(
825825
content=[
826826
{
@@ -887,7 +887,7 @@ class Joke(BaseModel):
887887
url = "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf"
888888
data = b64encode(requests.get(url).content).decode()
889889
890-
model = ChatAnthropic(model="claude-3-5-sonnet-latest")
890+
model = ChatAnthropic(model="claude-sonnet-4-5")
891891
ai_msg = model.invoke(
892892
[
893893
HumanMessage(
@@ -1948,7 +1948,7 @@ class GetPrice(BaseModel):
19481948
product: str = Field(..., description="The product to look up.")
19491949
19501950
1951-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
1951+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
19521952
model_with_tools = model.bind_tools([GetWeather, GetPrice])
19531953
model_with_tools.invoke(
19541954
"What is the weather like in San Francisco",
@@ -1958,7 +1958,7 @@ class GetPrice(BaseModel):
19581958
# {'text': '<thinking>\nBased on the user\'s question, the relevant function to call is GetWeather, which requires the "location" parameter.\n\nThe user has directly specified the location as "San Francisco". Since San Francisco is a well known city, I can reasonably infer they mean San Francisco, CA without needing the state specified.\n\nAll the required parameters are provided, so I can proceed with the API call.\n</thinking>', 'type': 'text'},
19591959
# {'text': None, 'type': 'tool_use', 'id': 'toolu_01SCgExKzQ7eqSkMHfygvYuu', 'name': 'GetWeather', 'input': {'location': 'San Francisco, CA'}}
19601960
# ],
1961-
# response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-3-5-sonnet-latest', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}},
1961+
# response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-sonnet-4-5', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}},
19621962
# id='run-87b1331e-9251-4a68-acef-f0a018b639cc-0'
19631963
# )
19641964
```
@@ -1982,7 +1982,7 @@ class GetPrice(BaseModel):
19821982
product: str = Field(..., description="The product to look up.")
19831983
19841984
1985-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
1985+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
19861986
model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="any")
19871987
model_with_tools.invoke(
19881988
"what is the weather like in San Francisco",
@@ -2008,7 +2008,7 @@ class GetPrice(BaseModel):
20082008
product: str = Field(..., description="The product to look up.")
20092009
20102010
2011-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
2011+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
20122012
model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="GetWeather")
20132013
model_with_tools.invoke("What is the weather like in San Francisco")
20142014
```
@@ -2043,7 +2043,7 @@ class GetPrice(BaseModel):
20432043
# We need to pass in extra headers to enable use of the beta cache
20442044
# control API.
20452045
model = ChatAnthropic(
2046-
model="claude-3-5-sonnet-latest",
2046+
model="claude-sonnet-4-5",
20472047
temperature=0,
20482048
)
20492049
model_with_tools = model.bind_tools([GetWeather, cached_price_tool])
@@ -2068,7 +2068,7 @@ class GetPrice(BaseModel):
20682068
],
20692069
response_metadata={
20702070
"id": "msg_01Xg7Wr5inFWgBxE5jH9rpRo",
2071-
"model": "claude-3-5-sonnet-latest",
2071+
"model": "claude-sonnet-4-5",
20722072
"stop_reason": "tool_use",
20732073
"stop_sequence": None,
20742074
"usage": {
@@ -2113,7 +2113,7 @@ class GetPrice(BaseModel):
21132113
],
21142114
response_metadata={
21152115
"id": "msg_016RfWHrRvW6DAGCdwB6Ac64",
2116-
"model": "claude-3-5-sonnet-latest",
2116+
"model": "claude-sonnet-4-5",
21172117
"stop_reason": "tool_use",
21182118
"stop_sequence": None,
21192119
"usage": {
@@ -2240,7 +2240,7 @@ class AnswerWithJustification(BaseModel):
22402240
justification: str
22412241
22422242
2243-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
2243+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
22442244
structured_model = model.with_structured_output(AnswerWithJustification)
22452245
22462246
structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2265,7 +2265,7 @@ class AnswerWithJustification(BaseModel):
22652265
justification: str
22662266
22672267
2268-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
2268+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
22692269
structured_model = model.with_structured_output(AnswerWithJustification, include_raw=True)
22702270
22712271
structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2293,7 +2293,7 @@ class AnswerWithJustification(BaseModel):
22932293
"required": ["answer", "justification"],
22942294
},
22952295
}
2296-
model = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0)
2296+
model = ChatAnthropic(model="claude-sonnet-4-5", temperature=0)
22972297
structured_model = model.with_structured_output(schema)
22982298
22992299
structured_model.invoke("What weighs more a pound of bricks or a pound of feathers")
@@ -2365,7 +2365,7 @@ def get_num_tokens_from_messages(
23652365
from langchain_anthropic import ChatAnthropic
23662366
from langchain_core.messages import HumanMessage, SystemMessage
23672367
2368-
model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
2368+
model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
23692369
23702370
messages = [
23712371
SystemMessage(content="You are a scientist"),
@@ -2385,7 +2385,7 @@ def get_num_tokens_from_messages(
23852385
from langchain_core.messages import HumanMessage
23862386
from langchain_core.tools import tool
23872387
2388-
model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
2388+
model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
23892389
23902390
@tool(parse_docstring=True)
23912391
def get_weather(location: str) -> str:

libs/partners/anthropic/langchain_anthropic/llms.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
class _AnthropicCommon(BaseLanguageModel):
2626
client: Any = None #: :meta private:
2727
async_client: Any = None #: :meta private:
28-
model: str = Field(default="claude-3-5-sonnet-latest", alias="model_name")
28+
model: str = Field(default="claude-sonnet-4-5", alias="model_name")
2929
"""Model name to use."""
3030

3131
max_tokens: int = Field(default=1024, alias="max_tokens_to_sample")

libs/partners/anthropic/tests/integration_tests/test_chat_models.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -588,7 +588,7 @@ class GenerateUsername(BaseModel):
588588

589589

590590
def test_disable_parallel_tool_calling() -> None:
591-
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg]
591+
llm = ChatAnthropic(model="claude-sonnet-4-5-20250929") # type: ignore[call-arg]
592592
llm_with_tools = llm.bind_tools([GenerateUsername], parallel_tool_calls=False)
593593
result = llm_with_tools.invoke(
594594
"Use the GenerateUsername tool to generate user names for:\n\n"
@@ -665,7 +665,7 @@ def test_with_structured_output() -> None:
665665

666666

667667
def test_get_num_tokens_from_messages() -> None:
668-
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg]
668+
llm = ChatAnthropic(model="claude-sonnet-4-5-20250929") # type: ignore[call-arg]
669669

670670
# Test simple case
671671
messages = [

libs/partners/anthropic/tests/unit_tests/test_chat_models.py

Lines changed: 12 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -55,17 +55,17 @@ def test_streaming_attribute_should_stream(async_api: bool) -> None: # noqa: FB
5555

5656
def test_anthropic_client_caching() -> None:
5757
"""Test that the OpenAI client is cached."""
58-
llm1 = ChatAnthropic(model="claude-3-5-sonnet-latest")
59-
llm2 = ChatAnthropic(model="claude-3-5-sonnet-latest")
58+
llm1 = ChatAnthropic(model="claude-sonnet-4-5")
59+
llm2 = ChatAnthropic(model="claude-sonnet-4-5")
6060
assert llm1._client._client is llm2._client._client
6161

62-
llm3 = ChatAnthropic(model="claude-3-5-sonnet-latest", base_url="foo")
62+
llm3 = ChatAnthropic(model="claude-sonnet-4-5", base_url="foo")
6363
assert llm1._client._client is not llm3._client._client
6464

65-
llm4 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=None)
65+
llm4 = ChatAnthropic(model="claude-sonnet-4-5", timeout=None)
6666
assert llm1._client._client is llm4._client._client
6767

68-
llm5 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=3)
68+
llm5 = ChatAnthropic(model="claude-sonnet-4-5", timeout=3)
6969
assert llm1._client._client is not llm5._client._client
7070

7171

@@ -74,9 +74,7 @@ def test_anthropic_proxy_support() -> None:
7474
proxy_url = "http://proxy.example.com:8080"
7575

7676
# Test sync client with proxy
77-
llm_sync = ChatAnthropic(
78-
model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url
79-
)
77+
llm_sync = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=proxy_url)
8078
sync_client = llm_sync._client
8179
assert sync_client is not None
8280

@@ -85,10 +83,8 @@ def test_anthropic_proxy_support() -> None:
8583
assert async_client is not None
8684

8785
# Test that clients with different proxy settings are not cached together
88-
llm_no_proxy = ChatAnthropic(model="claude-3-5-sonnet-latest")
89-
llm_with_proxy = ChatAnthropic(
90-
model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url
91-
)
86+
llm_no_proxy = ChatAnthropic(model="claude-sonnet-4-5")
87+
llm_with_proxy = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=proxy_url)
9288

9389
# Different proxy settings should result in different cached clients
9490
assert llm_no_proxy._client._client is not llm_with_proxy._client._client
@@ -100,7 +96,7 @@ def test_anthropic_proxy_from_environment() -> None:
10096

10197
# Test with environment variable set
10298
with patch.dict(os.environ, {"ANTHROPIC_PROXY": proxy_url}):
103-
llm = ChatAnthropic(model="claude-3-5-sonnet-latest")
99+
llm = ChatAnthropic(model="claude-sonnet-4-5")
104100
assert llm.anthropic_proxy == proxy_url
105101

106102
# Should be able to create clients successfully
@@ -112,9 +108,7 @@ def test_anthropic_proxy_from_environment() -> None:
112108
# Test that explicit parameter overrides environment variable
113109
with patch.dict(os.environ, {"ANTHROPIC_PROXY": "http://env-proxy.com"}):
114110
explicit_proxy = "http://explicit-proxy.com"
115-
llm = ChatAnthropic(
116-
model="claude-3-5-sonnet-latest", anthropic_proxy=explicit_proxy
117-
)
111+
llm = ChatAnthropic(model="claude-sonnet-4-5", anthropic_proxy=explicit_proxy)
118112
assert llm.anthropic_proxy == explicit_proxy
119113

120114

@@ -132,10 +126,6 @@ def test_set_default_max_tokens() -> None:
132126
llm = ChatAnthropic(model="claude-3-7-sonnet-latest", anthropic_api_key="test")
133127
assert llm.max_tokens == 64000
134128

135-
# Test claude-3-5-sonnet models
136-
llm = ChatAnthropic(model="claude-3-5-sonnet-latest", anthropic_api_key="test")
137-
assert llm.max_tokens == 8192
138-
139129
# Test claude-3-5-haiku models
140130
llm = ChatAnthropic(model="claude-3-5-haiku-latest", anthropic_api_key="test")
141131
assert llm.max_tokens == 8192
@@ -146,13 +136,13 @@ def test_set_default_max_tokens() -> None:
146136

147137
# Test that existing max_tokens values are preserved
148138
llm = ChatAnthropic(
149-
model="claude-3-5-sonnet-latest", max_tokens=2048, anthropic_api_key="test"
139+
model="claude-sonnet-4-5", max_tokens=2048, anthropic_api_key="test"
150140
)
151141
assert llm.max_tokens == 2048
152142

153143
# Test that explicitly set max_tokens values are preserved
154144
llm = ChatAnthropic(
155-
model="claude-3-5-sonnet-latest", max_tokens=4096, anthropic_api_key="test"
145+
model="claude-sonnet-4-5", max_tokens=4096, anthropic_api_key="test"
156146
)
157147
assert llm.max_tokens == 4096
158148

Comments (0)