Merged
38 changes: 24 additions & 14 deletions haystack/components/generators/utils.py
@@ -2,6 +2,8 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from typing import Any, Dict
+
 from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall
 
 from haystack.dataclasses import StreamingChunk
@@ -23,23 +25,31 @@ def print_streaming_chunk(chunk: StreamingChunk) -> None:
         tool results.
     """
     # Print tool call metadata if available (from ChatGenerator)
-    if chunk.meta.get("tool_calls"):
-        for tool_call in chunk.meta["tool_calls"]:
-            if isinstance(tool_call, ChoiceDeltaToolCall) and tool_call.function:
-                # print the tool name
-                if tool_call.function.name and not tool_call.function.arguments:
-                    print("[TOOL CALL]\n", flush=True, end="")
-                    print(f"Tool: {tool_call.function.name} ", flush=True, end="")
+    if tool_calls := chunk.meta.get("tool_calls"):
+        for tool_call in tool_calls:
+            # Convert to dict if tool_call is a ChoiceDeltaToolCall
+            tool_call_dict: Dict[str, Any] = (
+                tool_call.to_dict() if isinstance(tool_call, ChoiceDeltaToolCall) else tool_call
+            )
+
+            if function := tool_call_dict.get("function"):
+                if name := function.get("name"):
+                    print("\n\n[TOOL CALL]\n", flush=True, end="")
+                    print(f"Tool: {name} ", flush=True, end="")
                     print("\nArguments: ", flush=True, end="")
 
-                # print the tool arguments
-                if tool_call.function.arguments:
-                    print(tool_call.function.arguments, flush=True, end="")
+                if arguments := function.get("arguments"):
+                    print(arguments, flush=True, end="")
 
     # Print tool call results if available (from ToolInvoker)
-    if chunk.meta.get("tool_result"):
-        print(f"\n\n[TOOL RESULT]\n{chunk.meta['tool_result']}\n\n", flush=True, end="")
+    if tool_result := chunk.meta.get("tool_result"):
+        print(f"\n\n[TOOL RESULT]\n{tool_result}", flush=True, end="")
 
     # Print the main content of the chunk (from ChatGenerator)
-    if chunk.content:
-        print(chunk.content, flush=True, end="")
+    if content := chunk.content:
+        print(content, flush=True, end="")
 
     # End of LLM assistant message so we add two new lines
     # This ensures spacing between multiple LLM messages (e.g. Agent)
     if chunk.meta.get("finish_reason") is not None:
         print("\n\n", flush=True, end="")
8 changes: 8 additions & 0 deletions haystack/components/tools/tool_invoker.py
@@ -507,6 +507,10 @@ def run(
                     )
                 )
 
+        # We stream one more chunk that contains a finish_reason if tool_messages were generated
+        if len(tool_messages) > 0 and streaming_callback is not None:
+            streaming_callback(StreamingChunk(content="", meta={"finish_reason": "tool_call_results"}))
+
         return {"tool_messages": tool_messages, "state": state}
 
     @component.output_types(tool_messages=List[ChatMessage], state=State)
@@ -608,6 +612,10 @@ async def run_async(
                     )
                 )  # type: ignore[misc] # we have checked that streaming_callback is not None and async
 
+        # We stream one more chunk that contains a finish_reason if tool_messages were generated
+        if len(tool_messages) > 0 and streaming_callback is not None:
+            await streaming_callback(StreamingChunk(content="", meta={"finish_reason": "tool_call_results"}))  # type: ignore[misc] # we have checked that streaming_callback is not None and async
+
         return {"tool_messages": tool_messages, "state": state}
 
     def to_dict(self) -> Dict[str, Any]:
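Because of this trailing chunk, a custom streaming callback can now tell when ToolInvoker has finished emitting tool results for a turn. A minimal sketch (not part of the diff; the callback name and separator formatting are illustrative):

from haystack.dataclasses import StreamingChunk

def my_streaming_callback(chunk: StreamingChunk) -> None:
    # Print each tool result as it arrives
    if tool_result := chunk.meta.get("tool_result"):
        print(f"[tool result] {tool_result}")
    # Print regular assistant content
    if chunk.content:
        print(chunk.content, end="")
    # The extra chunk added in this diff signals that all tool results were streamed
    if chunk.meta.get("finish_reason") == "tool_call_results":
        print("\n--- end of tool results ---\n")

The run_async path emits the same chunk, so an async callback with this body behaves identically.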
@@ -0,0 +1,6 @@
+---
+enhancements:
+  - |
+    - Updates StreamingChunk construction in ToolInvoker to also stream a chunk with a finish reason. This is useful when using the print_streaming_chunk utility method.
+    - Updates print_streaming_chunk to format messages more clearly, especially when used with Agent.
+    - Updates print_streaming_chunk to work with the current version of the AWS Bedrock integration by handling the dict representation of ChoiceDeltaToolCall.
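Putting the two changes together, the behaviour described in the release note can be seen with a streaming Agent. A hedged sketch, not part of the PR: it assumes an OpenAI API key is configured, and the model name and weather tool are placeholders.

from haystack.components.agents import Agent
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import ChatMessage
from haystack.tools import Tool

def get_weather(city: str) -> str:
    # Placeholder tool implementation
    return f"Sunny in {city}"

weather_tool = Tool(
    name="get_weather",
    description="Return the current weather for a city.",
    parameters={"type": "object", "properties": {"city": {"type": "string"}}, "required": ["city"]},
    function=get_weather,
)

# print_streaming_chunk now renders tool calls, tool results, and assistant text
# with spacing between steps, driven by the finish_reason chunks.
agent = Agent(
    chat_generator=OpenAIChatGenerator(model="gpt-4o-mini"),
    tools=[weather_tool],
    streaming_callback=print_streaming_chunk,
)
result = agent.run(messages=[ChatMessage.from_user("What is the weather in Paris?")])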