Want an example of streaming with tool calling #402

Open
ijt opened this issue May 19, 2025 · 2 comments

ijt commented May 19, 2025

I got a demo of this working and I would like to merge it if there is interest.

ijt commented May 19, 2025

Here's examples/chat-completion-streaming-tool-calling/main.go in my repo, for reference:

package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/openai/openai-go"
)

func main() {
	client := openai.NewClient()

	ctx := context.Background()

	question := "What is the weather in New York City? Write a paragraph about it."

	print("> ")
	println(question)

	params := openai.ChatCompletionNewParams{
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.UserMessage(question),
		},
		Tools: []openai.ChatCompletionToolParam{
			{
				Function: openai.FunctionDefinitionParam{
					Name:        "get_weather",
					Description: openai.String("Get weather at the given location"),
					Parameters: openai.FunctionParameters{
						"type": "object",
						"properties": map[string]interface{}{
							"location": map[string]string{
								"type": "string",
							},
						},
						"required": []string{"location"},
					},
				},
			},
		},
		Seed:  openai.Int(0),
		Model: openai.ChatModelGPT4o,
	}

	// Make initial chat completion request with streaming
	stream := client.Chat.Completions.NewStreaming(ctx, params)
	acc := openai.ChatCompletionAccumulator{}

	// Stream and accumulate the first response
	fmt.Println("\nStreaming first response...")
	for stream.Next() {
		chunk := stream.Current()
		acc.AddChunk(chunk)

		// Display the content as it arrives
		if len(chunk.Choices) > 0 && chunk.Choices[0].Delta.Content != "" {
			print(chunk.Choices[0].Delta.Content)
		}

		// Check if a tool call has finished
		if tool, ok := acc.JustFinishedToolCall(); ok {
			fmt.Printf("\nTool call detected: %s with arguments %s\n", tool.Name, tool.Arguments)
		}
	}
	println()

	if err := stream.Err(); err != nil {
		panic(err)
	}

	// Access the accumulated message and tool calls
	var toolCalls []openai.ChatCompletionMessageToolCall
	if len(acc.Choices) > 0 {
		toolCalls = acc.Choices[0].Message.ToolCalls
	}

	// Return early if there are no tool calls
	if len(toolCalls) == 0 {
		fmt.Printf("No function call")
		return
	}

	// If there was a tool call, continue the conversation with the tool results
	params.Messages = append(params.Messages, acc.Choices[0].Message.ToParam())
	for _, toolCall := range toolCalls {
		if toolCall.Function.Name == "get_weather" {
			// Extract the location from the function call arguments
			var args map[string]interface{}
			err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
			if err != nil {
				panic(err)
			}
			location := args["location"].(string)

			// Simulate getting weather data
			weatherData := getWeather(location)

			// Print the weather data
			fmt.Printf("Weather in %s: %s\n", location, weatherData)

			params.Messages = append(params.Messages, openai.ToolMessage(weatherData, toolCall.ID))
		}
	}

	responseStream := client.Chat.Completions.NewStreaming(ctx, params)

	fmt.Println("\nStreaming second response...")
	for responseStream.Next() {
		evt := responseStream.Current()
		if len(evt.Choices) > 0 {
			print(evt.Choices[0].Delta.Content)
		}
	}
	println()

	if err := responseStream.Err(); err != nil {
		panic(err)
	}
}

// Mock function to simulate weather data retrieval
func getWeather(location string) string {
	// In a real implementation, this function would call a weather API
	return "Sunny, 25°C"
}
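
A couple of notes: openai.NewClient() reads the API key from the OPENAI_API_KEY environment variable, so the example runs as-is once that is set. Also, at least in the openai-go version I'm on, the accumulator exposes JustFinishedContent and JustFinishedRefusal alongside JustFinishedToolCall, so the first streaming loop could report those sub-streams as well. A minimal sketch of that variant:

	for stream.Next() {
		chunk := stream.Current()
		acc.AddChunk(chunk)

		// Report each sub-stream as the accumulator marks it complete.
		if content, ok := acc.JustFinishedContent(); ok {
			fmt.Println("Content stream finished:", content)
		}
		if refusal, ok := acc.JustFinishedRefusal(); ok {
			fmt.Println("Refusal stream finished:", refusal)
		}
		if tool, ok := acc.JustFinishedToolCall(); ok {
			fmt.Printf("Tool call finished: %s(%s)\n", tool.Name, tool.Arguments)
		}
	}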

virtuald commented May 27, 2025

This example works for me, thanks!
