diff --git a/.changeset/public-socks-travel.md b/.changeset/public-socks-travel.md
new file mode 100644
index 00000000..4483f26b
--- /dev/null
+++ b/.changeset/public-socks-travel.md
@@ -0,0 +1,5 @@
+---
+'@openai/agents-openai': patch
+---
+
+Fix a bug where the Responses API does not accept both the outputType and verbosity parameters for gpt-5
diff --git a/examples/basic/hello-world-gpt-5.ts b/examples/basic/hello-world-gpt-5.ts
index c94d8f50..fff60640
--- a/examples/basic/hello-world-gpt-5.ts
+++ b/examples/basic/hello-world-gpt-5.ts
@@ -1,4 +1,11 @@
-import { Agent, run } from '@openai/agents';
+import { Agent, OpenAIChatCompletionsModel, run } from '@openai/agents';
+import OpenAI from 'openai';
+import { z } from 'zod';
+
+const output = z.object({
+  title: z.string(),
+  description: z.string(),
+});
 
 async function main() {
   const agent = new Agent({
@@ -11,10 +18,27 @@ async function main() {
         text: { verbosity: 'low' },
       },
     },
+    outputType: output,
   });
 
-  const result = await run(agent, 'Tell me about recursion in programming.');
+  const prompt = 'Tell me about recursion in programming.';
+  const result = await run(agent, prompt);
   console.log(result.finalOutput);
+
+  const completionsAgent = new Agent({
+    name: 'GPT-5 Assistant',
+    model: new OpenAIChatCompletionsModel(new OpenAI(), 'gpt-5'),
+    instructions: "You're a helpful assistant.",
+    modelSettings: {
+      providerData: {
+        reasoning_effort: 'minimal',
+        verbosity: 'low',
+      },
+    },
+    outputType: output,
+  });
+  const completionsResult = await run(completionsAgent, prompt);
+  console.log(completionsResult.finalOutput);
 }
 
 if (require.main === module) {
diff --git a/packages/agents-openai/src/openaiResponsesModel.ts b/packages/agents-openai/src/openaiResponsesModel.ts
index f8ed1a11..e2173897
--- a/packages/agents-openai/src/openaiResponsesModel.ts
+++ b/packages/agents-openai/src/openaiResponsesModel.ts
@@ -71,12 +71,14 @@ function getToolChoice(
 
 function getResponseFormat(
   outputType: SerializedOutputType,
+  otherProperties: Record<string, any> | undefined,
 ): OpenAI.Responses.ResponseTextConfig | undefined {
   if (outputType === 'text') {
-    return undefined;
+    return otherProperties;
   }
 
   return {
+    ...otherProperties,
     format: outputType,
   };
 }
@@ -830,7 +832,9 @@ export class OpenAIResponsesModel implements Model {
     const input = getInputItems(request.input);
     const { tools, include } = getTools(request.tools, request.handoffs);
     const toolChoice = getToolChoice(request.modelSettings.toolChoice);
-    const responseFormat = getResponseFormat(request.outputType);
+    const { text, ...restOfProviderData } =
+      request.modelSettings.providerData ?? {};
+    const responseFormat = getResponseFormat(request.outputType, text);
     const prompt = getPrompt(request.prompt);
 
     let parallelToolCalls: boolean | undefined = undefined;
@@ -859,7 +863,7 @@ export class OpenAIResponsesModel implements Model {
         stream,
         text: responseFormat,
         store: request.modelSettings.store,
-        ...request.modelSettings.providerData,
+        ...restOfProviderData,
      };
 
      if (logger.dontLogModelData) {
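
For reviewers, here is a minimal standalone sketch (not part of the patch) of the merge rule the updated `getResponseFormat` implements: provider-supplied `text` settings such as `verbosity` are no longer allowed to clobber the structured-output `format`, because `text` is destructured out of `providerData` before the later spread. The names `mergeTextConfig` and `TextConfig` below are hypothetical stand-ins for illustration; the real helper uses `SerializedOutputType` and `OpenAI.Responses.ResponseTextConfig`.

```ts
// Simplified stand-in types; assumption: the real code types these via the OpenAI SDK.
type TextConfig = Record<string, any>;
type OutputType = 'text' | { type: 'json_schema'; name: string; schema: object };

function mergeTextConfig(
  outputType: OutputType,
  providerText: TextConfig | undefined,
): TextConfig | undefined {
  if (outputType === 'text') {
    // Plain-text output: pass provider settings (e.g. verbosity) through unchanged.
    return providerText;
  }
  // Structured output: keep provider settings, but the schema-derived format wins.
  return { ...providerText, format: outputType };
}

// With the fix, verbosity and a structured output format coexist in the request's `text` field.
const merged = mergeTextConfig(
  { type: 'json_schema', name: 'output', schema: {} },
  { verbosity: 'low' },
);
console.log(merged); // { verbosity: 'low', format: { type: 'json_schema', ... } }
```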