From 8217a53218c9f0e770f1f19d096cfbc6dfb8341e Mon Sep 17 00:00:00 2001 From: Yusuf Eren Date: Wed, 9 Jul 2025 20:47:01 +0300 Subject: [PATCH 1/2] feat: add logprobs and top_logprobs parameters to Responses API --- examples/responses/logprobs.ts | 38 ++++++++++++++++++++++++++++ examples/responses/non-stream.ts | 18 +++++++++++++ src/resources/responses/responses.ts | 8 +++++- 3 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 examples/responses/logprobs.ts create mode 100644 examples/responses/non-stream.ts diff --git a/examples/responses/logprobs.ts b/examples/responses/logprobs.ts new file mode 100644 index 000000000..d5e6b506c --- /dev/null +++ b/examples/responses/logprobs.ts @@ -0,0 +1,38 @@ +#!/usr/bin/env -S npm run tsn -T + +import OpenAI from 'openai'; + +const openai = new OpenAI(); + +async function main() { + const result = await openai.responses + .create({ + model: 'gpt-4o-2024-08-06', + input: 'solve 8x + 31 = 2', + include: ['message.output_text.logprobs'], + top_logprobs: 20, + }); + + for(const output of result.output) { + if(output.type === 'message') { + const logprobs = output.content.filter(content => content.type === 'output_text') + for(const logprob of logprobs) { + if(logprob.type === 'output_text') { + + // Top Logprobs + console.log(logprob.logprobs?.[0]?.top_logprobs); + + // Token + console.log(logprob.logprobs?.[0]?.token); + + // Token Logprobs + console.log(logprob.logprobs?.[0]?.logprob); + + // Bytes + console.log(logprob.logprobs?.[0]?.bytes); + } + } + } + } +} +main(); diff --git a/examples/responses/non-stream.ts b/examples/responses/non-stream.ts new file mode 100644 index 000000000..0734502ff --- /dev/null +++ b/examples/responses/non-stream.ts @@ -0,0 +1,18 @@ +#!/usr/bin/env -S npm run tsn -T + +import OpenAI from 'openai'; + +const openai = new OpenAI(); + +async function main() { + const result = await openai.responses + .create({ + model: 'gpt-4o-2024-08-06', + input: 'solve 8x + 31 = 2', + include: 
['message.output_text.logprobs'], + top_logprobs: 20, + }); + + console.log(result.output_text); +} +main(); diff --git a/src/resources/responses/responses.ts b/src/resources/responses/responses.ts index adf2aafb1..5aac7b0e7 100644 --- a/src/resources/responses/responses.ts +++ b/src/resources/responses/responses.ts @@ -1946,7 +1946,8 @@ export type ResponseIncludable = | 'message.input_image.image_url' | 'computer_call_output.output.image_url' | 'reasoning.encrypted_content' - | 'code_interpreter_call.outputs'; + | 'code_interpreter_call.outputs' + | 'message.output_text.logprobs'; /** * An event that is emitted when a response finishes as incomplete. @@ -4688,6 +4689,11 @@ export interface ResponseCreateParamsBase { */ include?: Array | null; + /** + * The number of top logprobs to include in the response. + */ + top_logprobs?: number | null; + /** * Text, image, or file inputs to the model, used to generate a response. * From d280e468437e9b05181dc7fffc007554b998b5db Mon Sep 17 00:00:00 2001 From: Yusuf Eren Date: Wed, 9 Jul 2025 21:06:47 +0300 Subject: [PATCH 2/2] feat: add logprobs and top_logprobs parameters to Responses API --- examples/responses/non-stream.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/responses/non-stream.ts b/examples/responses/non-stream.ts index 0734502ff..dc8315533 100644 --- a/examples/responses/non-stream.ts +++ b/examples/responses/non-stream.ts @@ -9,8 +9,6 @@ async function main() { .create({ model: 'gpt-4o-2024-08-06', input: 'solve 8x + 31 = 2', - include: ['message.output_text.logprobs'], - top_logprobs: 20, }); console.log(result.output_text);