8 changes: 8 additions & 0 deletions lib/ruby_llm/providers/vertexai/chat.rb
@@ -8,6 +8,14 @@ module Chat
def completion_url
"projects/#{@config.vertexai_project_id}/locations/#{@config.vertexai_location}/publishers/google/models/#{@model}:generateContent" # rubocop:disable Layout/LineLength
end

def render_payload(messages, **kwargs)
payload = super
payload[:contents] = payload[:contents].map do |content|
content[:role] == 'function' ? content.merge(role: 'user') : content
end
payload
end
end
end
end
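
For reference, a minimal standalone sketch of what the new render_payload override does to the request body. The payload shape here mirrors the spec below and is purely illustrative; the rationale (Vertex AI's generateContent endpoint expecting function responses under the 'user' role rather than 'function') is an assumption inferred from this change.

# Hypothetical payload, shaped like the Gemini generateContent request body.
payload = {
  contents: [
    {
      role: 'function', # tool/function response as rendered by the base provider
      parts: [
        { functionResponse: { name: 'call_1',
                              response: { name: 'call_1', content: [{ text: 'tool output' }] } } }
      ]
    },
    { role: 'user', parts: [{ text: 'prompt' }] }
  ]
}

# The override re-tags any 'function' entry as 'user', leaving other entries untouched.
payload[:contents] = payload[:contents].map do |content|
  content[:role] == 'function' ? content.merge(role: 'user') : content
end

# payload[:contents] now contains only 'user' roles, matching the spec expectation below.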
45 changes: 45 additions & 0 deletions spec/ruby_llm/providers/vertex_ai_spec.rb
@@ -36,4 +36,49 @@
end
end
end

describe '#render_payload' do
let(:location) { 'us-central1' }
let(:model) do
instance_double(RubyLLM::Model::Info, id: 'gemini-2.5-pro', max_tokens: nil, metadata: {})
end

it 'normalizes tool response content roles to user' do
tool_message = instance_double(
RubyLLM::Message,
role: :tool,
tool_call_id: 'call_1',
content: 'tool output'
)
user_message = instance_double(
RubyLLM::Message,
role: :user,
tool_call?: false,
tool_result?: false,
content: 'prompt'
)

payload = provider.send(:render_payload, [tool_message, user_message], tools: {}, temperature: nil, model: model)

expect(payload[:contents]).to eq(
[
{
role: 'user',
parts: [
{
functionResponse: {
name: 'call_1',
response: {
name: 'call_1',
content: [{ text: 'tool output' }]
}
}
}
]
},
{ role: 'user', parts: [{ text: 'prompt' }] }
]
)
end
end
end