Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/eleven-bananas-grow.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@openai/agents-openai': patch
---

fix: #558 prompt parameter does not work when being passed via an Agent
1 change: 1 addition & 0 deletions examples/basic/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
"start:stream-text": "tsx stream-text.ts",
"start:json-schema-output-type": "tsx json-schema-output-type.ts",
"start:tool-use-behavior": "tsx tool-use-behavior.ts",
"start:prompt-id": "tsx prompt-id.ts",
"start:tools": "tsx tools.ts",
"start:reasoning": "tsx reasoning.ts",
"start:local-file": "tsx local-file.ts",
Expand Down
8 changes: 3 additions & 5 deletions examples/basic/prompt-id.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,13 @@ async function main() {
const agent = new Agent({
name: 'Assistant',
prompt: {
promptId: 'pmpt_684b3b772e648193b92404d7d0101d8a07f7a7903e519946',
promptId: 'pmpt_68d50b26524c81958c1425070180b5e10ab840669e470fc7',
version: '1',
variables: {
poem_style: 'limerick',
},
variables: { name: 'Kaz' },
},
});

const result = await run(agent, 'Write about unrequited love.');
const result = await run(agent, 'What is your name?');
console.log(result.finalOutput);
}

Expand Down
20 changes: 19 additions & 1 deletion packages/agents-openai/src/openaiResponsesModel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -878,8 +878,8 @@ export class OpenAIResponsesModel implements Model {
}

const requestData = {
instructions: request.systemInstructions,
model: this.#model,
instructions: normalizeInstructions(request.systemInstructions),
input,
include,
tools,
Expand Down Expand Up @@ -1051,3 +1051,21 @@ export class OpenAIResponsesModel implements Model {
}
}
}

/**
* Sending an empty string for instructions can override the prompt parameter.
* Thus, this method checks if the instructions is an empty string and returns undefined if it is.
* @param instructions - The instructions to normalize.
* @returns The normalized instructions.
*/
/**
 * Sending an empty string for `instructions` can override the prompt
 * parameter on the Responses API, so empty or whitespace-only values are
 * dropped and replaced with `undefined`.
 * @param instructions - The raw system instructions, possibly unset.
 * @returns The original (untrimmed) instructions when they contain visible
 *   text, otherwise `undefined`.
 */
function normalizeInstructions(
  instructions: string | undefined,
): string | undefined {
  const hasContent =
    typeof instructions === 'string' && instructions.trim() !== '';
  return hasContent ? instructions : undefined;
}
65 changes: 62 additions & 3 deletions packages/agents-openai/test/openaiResponsesModel.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ describe('OpenAIResponsesModel', () => {
setTracingDisabled(true);
});
it('getResponse returns correct ModelResponse and calls client with right parameters', async () => {
withTrace('test', async () => {
await withTrace('test', async () => {
const fakeResponse = {
id: 'res1',
usage: {
Expand Down Expand Up @@ -74,8 +74,67 @@ describe('OpenAIResponsesModel', () => {
});
});

it('normalizes systemInstructions so empty strings are omitted', async () => {
await withTrace('test', async () => {
const fakeResponse = {
id: 'res-empty-instructions',
usage: {
input_tokens: 0,
output_tokens: 0,
total_tokens: 0,
},
output: [],
};
for (const systemInstructions of ['', ' ']) {
const request = {
systemInstructions,
input: 'hello',
modelSettings: {},
tools: [],
outputType: 'text',
handoffs: [],
tracing: false,
signal: undefined,
};
const createMock = vi.fn().mockResolvedValue(fakeResponse);
await new OpenAIResponsesModel(
{ responses: { create: createMock } } as unknown as OpenAI,
'gpt-test',
).getResponse(request as any);

expect(createMock).toHaveBeenCalledTimes(1);
const [args] = createMock.mock.calls[0];
expect('instructions' in args).toBe(true);
expect(args.instructions).toBeUndefined();
}

for (const systemInstructions of [' a ', 'foo']) {
const request = {
systemInstructions,
input: 'hello',
modelSettings: {},
tools: [],
outputType: 'text',
handoffs: [],
tracing: false,
signal: undefined,
};
const createMock = vi.fn().mockResolvedValue(fakeResponse);
await new OpenAIResponsesModel(
{ responses: { create: createMock } } as unknown as OpenAI,
'gpt-test',
).getResponse(request as any);

expect(createMock).toHaveBeenCalledTimes(1);
const [args] = createMock.mock.calls[0];
expect('instructions' in args).toBe(true);
expect(args.instructions).toBe(systemInstructions);
}
});
});

it('merges top-level reasoning and text settings into provider data for Responses API', async () => {
withTrace('test', async () => {
await withTrace('test', async () => {
const fakeResponse = {
id: 'res-settings',
usage: {
Expand Down Expand Up @@ -134,7 +193,7 @@ describe('OpenAIResponsesModel', () => {
});

it('getStreamedResponse yields events and calls client with stream flag', async () => {
withTrace('test', async () => {
await withTrace('test', async () => {
const fakeResponse = { id: 'res2', usage: {}, output: [] };
const events: ResponseStreamEvent[] = [
{ type: 'response.created', response: fakeResponse as any },
Expand Down