
Commit b81108f

chore: Update anthropic default model (#2551)
* Update AnthropicChatGenerator default model to claude-sonnet-4-5
* Update tests
* Remove fragile check, not all tool calls have thinking prior
* Minor test change
1 parent b85b1f4 · commit b81108f
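In practical terms, new AnthropicChatGenerator instances pick up claude-sonnet-4-5 automatically, while configurations that pass model explicitly are unaffected. A minimal sketch of the change for callers, assuming the Anthropic Haystack integration is installed and ANTHROPIC_API_KEY is exported (the prompt and printed output are illustrative):

from haystack.dataclasses import ChatMessage
from haystack_integrations.components.generators.anthropic import AnthropicChatGenerator

# With no `model` argument, the generator now defaults to claude-sonnet-4-5.
generator = AnthropicChatGenerator(
    generation_kwargs={"max_tokens": 1000, "temperature": 0.7},
)

# Callers that need the previous behaviour can still pin the old model explicitly.
pinned = AnthropicChatGenerator(model="claude-sonnet-4-20250514")

result = generator.run(messages=[ChatMessage.from_user("What is the capital of France?")])
print(result["replies"][0].text)           # e.g. "The capital of France is Paris."
print(result["replies"][0].meta["model"])  # contains "claude-sonnet-4-5"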

4 files changed: +34 −43 lines changed


integrations/anthropic/src/haystack_integrations/components/generators/anthropic/chat/chat_generator.py

Lines changed: 1 addition & 2 deletions
@@ -64,7 +64,6 @@ class AnthropicChatGenerator:
     from haystack.dataclasses import ChatMessage
 
     generator = AnthropicChatGenerator(
-        model="claude-sonnet-4-20250514",
         generation_kwargs={
             "max_tokens": 1000,
             "temperature": 0.7,

@@ -113,7 +112,7 @@ class AnthropicChatGenerator:
     def __init__(
         self,
         api_key: Secret = Secret.from_env_var("ANTHROPIC_API_KEY"),  # noqa: B008
-        model: str = "claude-sonnet-4-20250514",
+        model: str = "claude-sonnet-4-5",
         streaming_callback: Optional[StreamingCallbackT] = None,
         generation_kwargs: Optional[dict[str, Any]] = None,
         ignore_tools_thinking_messages: bool = True,

integrations/anthropic/tests/conftest.py

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@ def mock_chat_completion():
     completion = Message(
         id="foo",
         content=[{"type": "text", "text": "Hello, world!"}],
-        model="claude-sonnet-4-20250514",
+        model="claude-sonnet-4-5",
         role="assistant",
         type="message",
         usage={"input_tokens": 57, "output_tokens": 40},

@@ -36,7 +36,7 @@ def mock_chat_completion_extended_thinking():
             {"type": "thinking", "thinking": "This is a thinking part!", "signature": ""},
             {"type": "text", "text": "Hello, world!"},
         ],
-        model="claude-sonnet-4-20250514",
+        model="claude-sonnet-4-5",
         role="assistant",
         type="message",
         usage={"input_tokens": 57, "output_tokens": 40},

integrations/anthropic/tests/test_chat_generator.py

Lines changed: 30 additions & 38 deletions
@@ -95,7 +95,7 @@ def mock_anthropic_completion():
     completion = Message(
         id="foo",
         type="message",
-        model="claude-sonnet-4-20250514",
+        model="claude-sonnet-4-5",
         role="assistant",
         content=[TextBlockParam(type="text", text="Hello! I'm Claude.")],
         stop_reason="end_turn",

@@ -113,7 +113,7 @@ def test_init_default(self, monkeypatch):
         monkeypatch.setenv("ANTHROPIC_API_KEY", "test-api-key")
         component = AnthropicChatGenerator()
         assert component.client.api_key == "test-api-key"
-        assert component.model == "claude-sonnet-4-20250514"
+        assert component.model == "claude-sonnet-4-5"
         assert component.streaming_callback is None
         assert not component.generation_kwargs
         assert component.tools is None

@@ -146,13 +146,13 @@ def test_init_with_parameters(self, monkeypatch):
         monkeypatch.setenv("OPENAI_MAX_RETRIES", "10")
         component = AnthropicChatGenerator(
             api_key=Secret.from_token("test-api-key"),
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             streaming_callback=print_streaming_chunk,
             generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
             tools=[tool],
         )
         assert component.client.api_key == "test-api-key"
-        assert component.model == "claude-sonnet-4-20250514"
+        assert component.model == "claude-sonnet-4-5"
         assert component.streaming_callback is print_streaming_chunk
         assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
         assert component.tools == [tool]

@@ -164,13 +164,13 @@ def test_init_with_parameters_and_env_vars(self, monkeypatch):
         monkeypatch.setenv("OPENAI_TIMEOUT", "100")
         monkeypatch.setenv("OPENAI_MAX_RETRIES", "10")
         component = AnthropicChatGenerator(
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             api_key=Secret.from_token("test-api-key"),
             streaming_callback=print_streaming_chunk,
             generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
         )
         assert component.client.api_key == "test-api-key"
-        assert component.model == "claude-sonnet-4-20250514"
+        assert component.model == "claude-sonnet-4-5"
         assert component.streaming_callback is print_streaming_chunk
         assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
 

@@ -185,7 +185,7 @@ def test_to_dict_default(self, monkeypatch):
             "type": "haystack_integrations.components.generators.anthropic.chat.chat_generator.AnthropicChatGenerator",
             "init_parameters": {
                 "api_key": {"env_vars": ["ANTHROPIC_API_KEY"], "type": "env_var", "strict": True},
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "streaming_callback": None,
                 "ignore_tools_thinking_messages": True,
                 "generation_kwargs": {},

@@ -204,7 +204,7 @@ def test_to_dict_with_parameters(self, monkeypatch):
         monkeypatch.setenv("ENV_VAR", "test-api-key")
         component = AnthropicChatGenerator(
             api_key=Secret.from_env_var("ENV_VAR"),
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             streaming_callback=print_streaming_chunk,
             generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
             tools=[tool],

@@ -217,7 +217,7 @@ def test_to_dict_with_parameters(self, monkeypatch):
             "type": "haystack_integrations.components.generators.anthropic.chat.chat_generator.AnthropicChatGenerator",
             "init_parameters": {
                 "api_key": {"env_vars": ["ENV_VAR"], "type": "env_var", "strict": True},
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
                 "ignore_tools_thinking_messages": True,
                 "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},

@@ -261,7 +261,7 @@ def test_from_dict(self, monkeypatch):
             "type": "haystack_integrations.components.generators.anthropic.chat.chat_generator.AnthropicChatGenerator",
             "init_parameters": {
                 "api_key": {"env_vars": ["ANTHROPIC_API_KEY"], "type": "env_var", "strict": True},
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
                 "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
                 "tools": [

@@ -284,7 +284,7 @@ def test_from_dict(self, monkeypatch):
         component = AnthropicChatGenerator.from_dict(data)
 
         assert isinstance(component, AnthropicChatGenerator)
-        assert component.model == "claude-sonnet-4-20250514"
+        assert component.model == "claude-sonnet-4-5"
         assert component.streaming_callback is print_streaming_chunk
         assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
         assert component.api_key == Secret.from_env_var("ANTHROPIC_API_KEY")

@@ -301,7 +301,7 @@ def test_from_dict_fail_wo_env_var(self, monkeypatch):
             "type": "haystack_integrations.components.generators.anthropic.chat.chat_generator.AnthropicChatGenerator",
             "init_parameters": {
                 "api_key": {"env_vars": ["ANTHROPIC_API_KEY"], "type": "env_var", "strict": True},
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
                 "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
             },

@@ -341,7 +341,7 @@ def test_run_with_params(self, chat_messages, mock_anthropic_completion):
         assert len(response["replies"]) == 1
         assert isinstance(response["replies"][0], ChatMessage)
         assert "Hello! I'm Claude." in response["replies"][0].text
-        assert response["replies"][0].meta["model"] == "claude-sonnet-4-20250514"
+        assert response["replies"][0].meta["model"] == "claude-sonnet-4-5"
         assert response["replies"][0].meta["finish_reason"] == "stop"
 
     def test_check_duplicate_tool_names(self, tools):

@@ -358,7 +358,7 @@ def test_convert_chat_completion_to_chat_message(self, mock_chat_completion):
         chat_message = _convert_chat_completion_to_chat_message(chat_completion, ignore_tools_thinking_messages=True)
         assert chat_message.text == "Hello, world!"
         assert chat_message.role == "assistant"
-        assert chat_message.meta["model"] == "claude-sonnet-4-20250514"
+        assert chat_message.meta["model"] == "claude-sonnet-4-5"
         assert "usage" in chat_message.meta
         assert chat_message.meta["usage"]["prompt_tokens"] == 57
         assert chat_message.meta["usage"]["completion_tokens"] == 40

@@ -376,7 +376,7 @@ def test_convert_chat_completion_to_chat_message_with_reasoning_and_tool_call(se
                     id="toolu_01XEkx", input={"expression": "7 * (4 + 2)"}, name="calculator", type="tool_use"
                 ),
             ],
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             role="assistant",
             stop_reason="tool_use",
             stop_sequence=None,

@@ -391,7 +391,7 @@ def test_convert_chat_completion_to_chat_message_with_reasoning_and_tool_call(se
                {"reasoning_content": {"reasoning_text": {"text": "User has asked 2 questions", "signature": "sign1"}}}
            ]
        }
-        assert chat_message.meta["model"] == "claude-sonnet-4-20250514"
+        assert chat_message.meta["model"] == "claude-sonnet-4-5"
         assert chat_message.meta["finish_reason"] == "tool_calls"
         assert "usage" in chat_message.meta
         assert chat_message.meta["usage"]["prompt_tokens"] == 507

@@ -411,7 +411,7 @@ def test_convert_anthropic_completion_chunks_with_multiple_tool_calls_and_reason
             message=Message(
                 id="msg_01ApGaijiGeLtxWLCKUKELfT",
                 content=[],
-                model="claude-sonnet-4-20250514",
+                model="claude-sonnet-4-5",
                 role="assistant",
                 stop_reason=None,
                 stop_sequence=None,

@@ -728,7 +728,7 @@ def test_convert_streaming_chunks_to_chat_message_with_multiple_tool_calls(self)
                 "type": "message",
                 "role": "assistant",
                 "content": [],
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "stop_reason": None,
                 "stop_sequence": None,
                 "usage": {"input_tokens": 25, "output_tokens": 0},

@@ -888,7 +888,7 @@ def test_convert_streaming_chunks_to_chat_message_tool_call_with_empty_arguments
                 "type": "message",
                 "role": "assistant",
                 "content": [],
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "stop_reason": None,
                 "stop_sequence": None,
                 "usage": {"input_tokens": 25, "output_tokens": 0},

@@ -986,7 +986,7 @@ def test_serde_in_pipeline(self):
 
         generator = AnthropicChatGenerator(
             api_key=Secret.from_env_var("ANTHROPIC_API_KEY", strict=False),
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             generation_kwargs={"temperature": 0.6},
             tools=[tool],
         )

@@ -1006,7 +1006,7 @@ def test_serde_in_pipeline(self):
                     "type": type_,
                     "init_parameters": {
                         "api_key": {"type": "env_var", "env_vars": ["ANTHROPIC_API_KEY"], "strict": False},
-                        "model": "claude-sonnet-4-20250514",
+                        "model": "claude-sonnet-4-5",
                         "generation_kwargs": {"temperature": 0.6},
                         "ignore_tools_thinking_messages": True,
                         "streaming_callback": None,

@@ -1068,7 +1068,7 @@ def test_live_run(self):
         assert len(results["replies"]) == 1
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.text
-        assert "claude-sonnet-4-20250514" in message.meta["model"]
+        assert "claude-sonnet-4-5" in message.meta["model"]
         assert message.meta["finish_reason"] == "stop"
 
     @pytest.mark.skipif(

@@ -1110,7 +1110,7 @@ def __call__(self, chunk: StreamingChunk) -> None:
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.text
 
-        assert "claude-sonnet-4-20250514" in message.meta["model"]
+        assert "claude-sonnet-4-5" in message.meta["model"]
         assert message.meta["finish_reason"] == "stop"
         assert callback.counter > 1
         assert "Paris" in callback.responses

@@ -1527,11 +1527,6 @@ def test_live_run_with_tools_streaming(self, tools):
         assert len(results["replies"]) == 1
         message = results["replies"][0]
 
-        # this is Anthropic message prior to tool call
-        assert message.text is not None
-        assert "weather" in message.text.lower()
-        assert "paris" in message.text.lower()
-
         # now we have the tool call
         assert message.tool_calls
         tool_call = message.tool_call

@@ -1621,9 +1616,6 @@ def test_live_run_with_tool_with_no_args_streaming(self, tool_with_no_parameters
         assert len(results["replies"]) == 1
         message = results["replies"][0]
 
-        # this is Anthropic thinking message prior to tool call
-        assert message.text is not None
-
         # now we have the tool call
         assert message.tool_calls
         tool_call = message.tool_call
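The last two hunks above drop assertions that expected a text or thinking block before every tool call; per the commit message, not all tool-call responses include one. A minimal sketch of the hardened pattern the updated tests rely on (the `results` dict and the weather/Paris tool mirror the surrounding tests and are illustrative only):

message = results["replies"][0]

# A text/thinking block may or may not precede the tool call, so it is no
# longer asserted; only inspect it when it is present.
if message.text:
    assert isinstance(message.text, str)

# The tool call itself is what the test actually guarantees.
assert message.tool_calls
tool_call = message.tool_call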
@@ -1884,7 +1876,7 @@ def test_from_dict_with_prompt_caching(self, monkeypatch):
             "type": "haystack_integrations.components.generators.anthropic.chat.chat_generator.AnthropicChatGenerator",
             "init_parameters": {
                 "api_key": {"env_vars": ["ANTHROPIC_API_KEY"], "strict": True, "type": "env_var"},
-                "model": "claude-sonnet-4-20250514",
+                "model": "claude-sonnet-4-5",
                 "generation_kwargs": {"extra_headers": {"anthropic-beta": "prompt-caching-2024-07-31"}},
             },
         }

@@ -2029,7 +2021,7 @@ def test_prompt_caching_live_run_with_user_message(self, cache_enabled):
     @pytest.mark.integration
     def test_live_run_with_reasoning(self, streaming_callback):
         chat_generator = AnthropicChatGenerator(
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             generation_kwargs={"thinking": {"type": "enabled", "budget_tokens": 10000}, "max_tokens": 11000},
             streaming_callback=streaming_callback,
         )

@@ -2114,7 +2106,7 @@ async def mock_anthropic_completion_async(self):
         completion = Message(
             id="foo",
             type="message",
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             role="assistant",
             content=[TextBlockParam(type="text", text="Hello! I'm Claude.")],
             stop_reason="end_turn",

@@ -2130,7 +2122,7 @@ async def mock_anthropic_completion_async_with_tool(self):
         completion = Message(
             id="foo",
             type="message",
-            model="claude-sonnet-4-20250514",
+            model="claude-sonnet-4-5",
             role="assistant",
             content=[
                 TextBlockParam(type="text", text="Let me check the weather for you."),

@@ -2181,7 -2173,7 @@ async def test_run_async_with_params(self, chat_messages, mock_anthropic_complet
         assert len(response["replies"]) == 1
         assert isinstance(response["replies"][0], ChatMessage)
         assert "Hello! I'm Claude." in response["replies"][0].text
-        assert response["replies"][0].meta["model"] == "claude-sonnet-4-20250514"
+        assert response["replies"][0].meta["model"] == "claude-sonnet-4-5"
         assert response["replies"][0].meta["finish_reason"] == "stop"
         assert "completion_tokens" in response["replies"][0].meta["usage"]
 

@@ -2200,7 -2192,7 @@ async def test_live_run_async(self):
         assert len(results["replies"]) == 1
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.text
-        assert "claude-sonnet-4-20250514" in message.meta["model"]
+        assert "claude-sonnet-4-5" in message.meta["model"]
         assert message.meta["finish_reason"] == "stop"
         assert "completion_tokens" in message.meta["usage"]
 

@@ -2236,7 -2228,7 @@ async def callback(chunk: StreamingChunk) -> None:
         assert len(results["replies"]) == 1
         message = results["replies"][0]
         assert "paris" in message.text.lower()
-        assert "claude-sonnet-4-20250514" in message.meta["model"]
+        assert "claude-sonnet-4-5" in message.meta["model"]
         assert message.meta["finish_reason"] == "stop"
         assert "input_tokens" in message.meta["usage"]
         assert "output_tokens" in message.meta["usage"]

integrations/anthropic/tests/test_vertex_chat_generator.py

Lines changed: 1 addition & 1 deletion
@@ -208,7 +208,7 @@ async def test_live_run_async(self):
         assert len(results["replies"]) == 1
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.text
-        assert "claude-sonnet-4-20250514" in message.meta["model"]
+        assert "claude-sonnet-4-5" in message.meta["model"]
         assert message.meta["finish_reason"] == "stop"
 
     # Anthropic messages API is similar for AnthropicVertex and Anthropic endpoint,
