Commit 6f5b745

feat: Improve formatting in print streaming chunk (#9431)
* Improve formatting in print streaming chunk
* Add reno
* Fix mypy
* PR comments
1 parent 720cc19 commit 6f5b745

File tree

3 files changed: +38 additions, −14 deletions


haystack/components/generators/utils.py

Lines changed: 24 additions & 14 deletions
@@ -2,6 +2,8 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from typing import Any, Dict
+
 from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall
 
 from haystack.dataclasses import StreamingChunk
@@ -23,23 +25,31 @@ def print_streaming_chunk(chunk: StreamingChunk) -> None:
     tool results.
     """
     # Print tool call metadata if available (from ChatGenerator)
-    if chunk.meta.get("tool_calls"):
-        for tool_call in chunk.meta["tool_calls"]:
-            if isinstance(tool_call, ChoiceDeltaToolCall) and tool_call.function:
-                # print the tool name
-                if tool_call.function.name and not tool_call.function.arguments:
-                    print("[TOOL CALL]\n", flush=True, end="")
-                    print(f"Tool: {tool_call.function.name} ", flush=True, end="")
+    if tool_calls := chunk.meta.get("tool_calls"):
+        for tool_call in tool_calls:
+            # Convert to dict if tool_call is a ChoiceDeltaToolCall
+            tool_call_dict: Dict[str, Any] = (
+                tool_call.to_dict() if isinstance(tool_call, ChoiceDeltaToolCall) else tool_call
+            )
+
+            if function := tool_call_dict.get("function"):
+                if name := function.get("name"):
+                    print("\n\n[TOOL CALL]\n", flush=True, end="")
+                    print(f"Tool: {name} ", flush=True, end="")
                     print("\nArguments: ", flush=True, end="")
 
-                # print the tool arguments
-                if tool_call.function.arguments:
-                    print(tool_call.function.arguments, flush=True, end="")
+                if arguments := function.get("arguments"):
+                    print(arguments, flush=True, end="")
 
     # Print tool call results if available (from ToolInvoker)
-    if chunk.meta.get("tool_result"):
-        print(f"\n\n[TOOL RESULT]\n{chunk.meta['tool_result']}\n\n", flush=True, end="")
+    if tool_result := chunk.meta.get("tool_result"):
+        print(f"\n\n[TOOL RESULT]\n{tool_result}", flush=True, end="")
 
     # Print the main content of the chunk (from ChatGenerator)
-    if chunk.content:
-        print(chunk.content, flush=True, end="")
+    if content := chunk.content:
+        print(content, flush=True, end="")
+
+    # End of LLM assistant message so we add two new lines
+    # This ensures spacing between multiple LLM messages (e.g. Agent)
+    if chunk.meta.get("finish_reason") is not None:
+        print("\n\n", flush=True, end="")
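A hedged illustration of the updated utility (not part of the commit): because tool calls are now normalized to dicts, print_streaming_chunk can handle tool calls supplied as plain dicts as well as ChoiceDeltaToolCall objects. The meta shapes used below are assumptions inferred from the diff above; the tool name and arguments are invented for the example.

# Sketch only: exercising the updated utility with a dict-shaped tool call and a tool result.
from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import StreamingChunk

# A chunk carrying a tool call as a plain dict (shape assumed from the diff above).
tool_call_chunk = StreamingChunk(
    content="",
    meta={"tool_calls": [{"function": {"name": "weather_tool", "arguments": '{"city": "Berlin"}'}}]},
)
print_streaming_chunk(tool_call_chunk)
# Expected to print the "[TOOL CALL]" header, the tool name, and the arguments.

# A chunk carrying a tool result, as emitted by ToolInvoker.
result_chunk = StreamingChunk(content="", meta={"tool_result": "Sunny, 22 °C"})
print_streaming_chunk(result_chunk)
# Expected to print a "[TOOL RESULT]" block.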

haystack/components/tools/tool_invoker.py

Lines changed: 8 additions & 0 deletions
@@ -508,6 +508,10 @@ def run(
                 )
             )
 
+        # We stream one more chunk that contains a finish_reason if tool_messages were generated
+        if len(tool_messages) > 0 and streaming_callback is not None:
+            streaming_callback(StreamingChunk(content="", meta={"finish_reason": "tool_call_results"}))
+
         return {"tool_messages": tool_messages, "state": state}
 
     @component.output_types(tool_messages=List[ChatMessage], state=State)
@@ -609,6 +613,10 @@ async def run_async(
                 )
             )  # type: ignore[misc] # we have checked that streaming_callback is not None and async
 
+        # We stream one more chunk that contains a finish_reason if tool_messages were generated
+        if len(tool_messages) > 0 and streaming_callback is not None:
+            await streaming_callback(StreamingChunk(content="", meta={"finish_reason": "tool_call_results"}))  # type: ignore[misc] # we have checked that streaming_callback is not None and async
+
         return {"tool_messages": tool_messages, "state": state}
 
     def to_dict(self) -> Dict[str, Any]:
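For orientation (not part of the commit): the practical effect is that the streaming callback now receives one trailing, content-less chunk after all tool invocations, which print_streaming_chunk turns into a separating blank line. A minimal sketch, assuming only what the diff shows:

from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import StreamingChunk

# The trailing chunk ToolInvoker now emits once tool messages were generated.
final_chunk = StreamingChunk(content="", meta={"finish_reason": "tool_call_results"})

# print_streaming_chunk reacts to the finish_reason by printing two newlines,
# separating this message from the next one (e.g. in an Agent run).
print_streaming_chunk(final_chunk)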
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+---
+enhancements:
+  - |
+    - Updates StreamingChunk construction in ToolInvoker to also stream a chunk with a finish reason. This is useful when using the print_streaming_chunk utility method
+    - Update the print_streaming_chunk to have better formatting of messages especially when using it with Agent.
+    - Also updated to work with the current version of the AWS Bedrock integration by working with the dict representation of ChoiceDeltaToolCall
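A possible end-to-end sketch matching the note above: passing print_streaming_chunk as the streaming_callback of an Agent so tool calls, tool results, and assistant text are printed with the improved spacing. The Agent and Tool constructor arguments shown here are assumptions about the surrounding Haystack API, not taken from this commit.

# Assumed wiring, not from this commit: constructor names/parameters may differ in your Haystack version.
from haystack.components.agents import Agent
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import ChatMessage
from haystack.tools import Tool


def get_weather(city: str) -> str:
    # Toy tool used only for this sketch.
    return f"Sunny in {city}"


weather_tool = Tool(
    name="get_weather",
    description="Return the weather for a city.",
    parameters={"type": "object", "properties": {"city": {"type": "string"}}, "required": ["city"]},
    function=get_weather,
)

agent = Agent(
    chat_generator=OpenAIChatGenerator(model="gpt-4o-mini"),
    tools=[weather_tool],
    streaming_callback=print_streaming_chunk,  # prints tool calls, tool results, and text with the new spacing
)
agent.run(messages=[ChatMessage.from_user("What is the weather in Berlin?")])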
