fix(openai-compatible): support openrouter reasoning field #6361

Open: wants to merge 3 commits into main
.changeset/smooth-radios-breathe.md (5 additions, 0 deletions)
@@ -0,0 +1,5 @@
---
'@ai-sdk/openai-compatible': patch
---

support OpenRouter's reasoning field
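
For context, a minimal sketch of the two response shapes this change reconciles. The types and values below are illustrative only, not fixtures from this PR: DeepSeek-style APIs return the chain of thought under reasoning_content, OpenRouter returns it under reasoning, and the provider now prefers the former and falls back to the latter.

```ts
// Sketch only: illustrative shapes, not actual fixtures from this PR.
type AssistantMessage = {
  role: 'assistant';
  content: string | null;
  reasoning_content?: string | null; // DeepSeek-style reasoning field
  reasoning?: string | null; // OpenRouter-style reasoning field
};

// Mapping applied in doGenerate after this change:
// prefer reasoning_content, fall back to reasoning.
function extractReasoning(message: AssistantMessage): string | undefined {
  return message.reasoning_content ?? message.reasoning ?? undefined;
}

// An OpenRouter-style message that only carries `reasoning`:
const openRouterMessage: AssistantMessage = {
  role: 'assistant',
  content: 'Hello, World!',
  reasoning: 'This is the reasoning behind the response',
};

console.log(extractReasoning(openRouterMessage));
// -> 'This is the reasoning behind the response'
```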
@@ -71,7 +71,8 @@ describe('config', () => {
describe('doGenerate', () => {
function prepareJsonResponse({
content = '',
reasoning_content = '',
reasoning_content,
reasoning,
tool_calls,
function_call,
usage = {
@@ -87,6 +88,7 @@ describe('doGenerate', () => {
}: {
content?: string;
reasoning_content?: string;
reasoning?: string;
tool_calls?: Array<{
id: string;
type: 'function';
@@ -133,6 +135,7 @@ describe('doGenerate', () => {
role: 'assistant',
content,
reasoning_content,
reasoning,
tool_calls,
function_call,
},
@@ -172,7 +175,7 @@ describe('doGenerate', () => {
expect(text).toStrictEqual('Hello, World!');
});

it('should extract reasoning content', async () => {
it('should extract reasoning content from the reasoning_content field', async () => {
prepareJsonResponse({
content: 'Hello, World!',
reasoning_content: 'This is the reasoning behind the response',
@@ -190,6 +193,41 @@ describe('doGenerate', () => {
);
});

it('should extract reasoning content from the reasoning field', async () => {
prepareJsonResponse({
content: 'Hello, World!',
reasoning: 'This is the reasoning behind the response',
});

const { text, reasoning } = await model.doGenerate({
inputFormat: 'prompt',
mode: { type: 'regular' },
prompt: TEST_PROMPT,
});

expect(text).toStrictEqual('Hello, World!');
expect(reasoning).toStrictEqual(
'This is the reasoning behind the response',
);
});

it('should prefer reasoning_content when both reasoning_content and reasoning fields are present', async () => {
prepareJsonResponse({
content: 'Hello, World!',
reasoning_content: '123',
reasoning: '456',
});

const { text, reasoning } = await model.doGenerate({
inputFormat: 'prompt',
mode: { type: 'regular' },
prompt: TEST_PROMPT,
});

expect(text).toStrictEqual('Hello, World!');
expect(reasoning).toStrictEqual('123');
});

it('should extract usage', async () => {
prepareJsonResponse({
content: '',
@@ -289,7 +327,7 @@ describe('doGenerate', () => {

expect(rawResponse?.headers).toStrictEqual({
// default headers:
'content-length': '335',
'content-length': '312',
'content-type': 'application/json',

// custom header
@@ -300,7 +300,10 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {

return {
text: choice.message.content ?? undefined,
reasoning: choice.message.reasoning_content ?? undefined,
reasoning:
choice.message.reasoning_content ??
choice.message.reasoning ??
undefined,
toolCalls: choice.message.tool_calls?.map(toolCall => ({
toolCallType: 'function',
toolCallId: toolCall.id ?? generateId(),
@@ -529,10 +532,10 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {
const delta = choice.delta;

// enqueue reasoning before text deltas:
if (delta.reasoning_content != null) {
if (delta.reasoning_content != null || delta.reasoning != null) {
controller.enqueue({
type: 'reasoning',
textDelta: delta.reasoning_content,
textDelta: delta.reasoning_content || delta.reasoning,
});
}

@@ -730,6 +733,7 @@ const OpenAICompatibleChatResponseSchema = z.object({
role: z.literal('assistant').nullish(),
content: z.string().nullish(),
reasoning_content: z.string().nullish(),
reasoning: z.string().nullish(),
tool_calls: z
.array(
z.object({
@@ -766,6 +770,7 @@ const createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(
role: z.enum(['assistant']).nullish(),
content: z.string().nullish(),
reasoning_content: z.string().nullish(),
reasoning: z.string().nullish(),
tool_calls: z
.array(
z.object({
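For streaming, the same precedence applies to delta chunks. A minimal standalone sketch of the branch added above, using hypothetical delta values rather than fixtures from this PR:

```ts
// Sketch only: mirrors the provider's streaming branch for reasoning deltas.
type Delta = {
  content?: string | null;
  reasoning_content?: string | null;
  reasoning?: string | null;
};

type ReasoningPart = { type: 'reasoning'; textDelta: string };

function toReasoningPart(delta: Delta): ReasoningPart | undefined {
  // Enqueue reasoning before text deltas; reasoning_content wins when both are set.
  if (delta.reasoning_content != null || delta.reasoning != null) {
    return {
      type: 'reasoning',
      textDelta: delta.reasoning_content || delta.reasoning || '',
    };
  }
  return undefined;
}

console.log(toReasoningPart({ reasoning: 'thinking about the answer' }));
// -> { type: 'reasoning', textDelta: 'thinking about the answer' }
```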