Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
132 changes: 132 additions & 0 deletions packages/providers/src/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -304,3 +304,135 @@ describe('complete', () => {
).rejects.toMatchObject({ code: 'ATTACHMENT_TOO_LARGE' });
});
});

describe('complete — openai-responses strict instructions', () => {
  // Canned assistant reply returned by completeSimpleMock. Extracted into a
  // factory because the three tests below previously repeated the same
  // 17-line object literal verbatim. `as const` on the discriminant fields
  // keeps their literal types narrow so the object still satisfies the
  // mocked completeSimple return type.
  const assistantReply = (api: string, provider: string, model: string) => ({
    role: 'assistant' as const,
    content: [{ type: 'text' as const, text: 'ok' }],
    api,
    provider,
    model,
    usage: {
      input: 1,
      output: 1,
      cacheRead: 0,
      cacheWrite: 0,
      totalTokens: 2,
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
    },
    stopReason: 'stop' as const,
    timestamp: Date.now(),
  });

  it('injects top-level instructions and strips system/developer input items via onPayload', async () => {
    getModelMock.mockReturnValue({
      id: 'gpt-5.1',
      api: 'openai-responses',
      provider: 'openai',
    });

    // Capture the onPayload hook that complete() attaches so we can drive it
    // directly with a synthetic payload below.
    let capturedOnPayload:
      | ((payload: unknown) => unknown | Promise<unknown | undefined>)
      | undefined;

    completeSimpleMock.mockImplementationOnce(async (_model, _context, opts) => {
      capturedOnPayload = opts.onPayload;
      return assistantReply('openai-responses', 'openai', 'gpt-5.1');
    });

    await complete(
      { provider: 'openai', modelId: 'gpt-5.1' },
      [
        { role: 'system', content: 'You are open-codesign.' },
        { role: 'user', content: 'hi' },
      ],
      { apiKey: 'sk-test' },
    );

    expect(capturedOnPayload).toBeDefined();

    // Simulate the payload shape pi-ai would build: system + developer items
    // inside `input[]` and no top-level `instructions`.
    const params = {
      input: [
        { role: 'system', content: 'ignored' },
        { role: 'developer', content: 'ignored' },
        { role: 'user', content: [{ type: 'input_text', text: 'hi' }] },
      ],
    };
    const mutated = (await capturedOnPayload?.(params)) as {
      instructions?: string;
      input: Array<{ role: string }>;
    };

    // The hook must promote the system prompt to `instructions` and strip
    // system/developer entries, leaving only the user item.
    expect(mutated.instructions).toBe('You are open-codesign.');
    expect(mutated.input.map((entry) => entry.role)).toEqual(['user']);
  });

  it('does not attach onPayload when systemPrompt is empty', async () => {
    getModelMock.mockReturnValue({
      id: 'gpt-5.1',
      api: 'openai-responses',
      provider: 'openai',
    });

    completeSimpleMock.mockImplementationOnce(async (_model, _context, opts) => {
      expect(opts.onPayload).toBeUndefined();
      return assistantReply('openai-responses', 'openai', 'gpt-5.1');
    });

    // No system message in the conversation → no systemPrompt → no hook.
    await complete({ provider: 'openai', modelId: 'gpt-5.1' }, [{ role: 'user', content: 'hi' }], {
      apiKey: 'sk-test',
    });
  });

  it('does not attach onPayload for anthropic-messages wire even with systemPrompt', async () => {
    getModelMock.mockReturnValue({
      id: 'claude-4.7-sonnet',
      api: 'anthropic-messages',
      provider: 'anthropic',
    });

    completeSimpleMock.mockImplementationOnce(async (_model, _context, opts) => {
      expect(opts.onPayload).toBeUndefined();
      return assistantReply('anthropic-messages', 'anthropic', 'claude-4.7-sonnet');
    });

    // The strict-instructions workaround is scoped to the openai-responses
    // wire; other APIs must be left untouched even with a system message.
    await complete(
      { provider: 'anthropic', modelId: 'claude-4.7-sonnet' },
      [
        { role: 'system', content: 'You are open-codesign.' },
        { role: 'user', content: 'hi' },
      ],
      { apiKey: 'sk-ant-test' },
    );
  });
});
28 changes: 27 additions & 1 deletion packages/providers/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -233,6 +233,7 @@ export async function complete(
maxTokens?: number;
reasoning?: ReasoningLevel;
headers?: Record<string, string>;
onPayload?: (payload: unknown) => unknown;
},
) => Promise<PiAssistantMessage>;
};
Expand All @@ -251,13 +252,16 @@ export async function complete(
}
}

const piContext = toPiContext(messages, piModel, opts);

const piOpts: {
apiKey: string;
baseUrl?: string;
signal?: AbortSignal;
maxTokens?: number;
reasoning?: ReasoningLevel;
headers?: Record<string, string>;
onPayload?: (payload: unknown) => unknown;
} = {
apiKey,
};
Expand All @@ -267,6 +271,28 @@ export async function complete(
if (opts.reasoning !== undefined) piOpts.reasoning = opts.reasoning;
if (opts.httpHeaders !== undefined) piOpts.headers = { ...opts.httpHeaders };

// Strict OpenAI-Responses gateways (e.g. sub2api-style routers) 400 when
// they see BOTH a system/developer item in `input[]` AND no top-level
// `instructions`. pi-ai's plain `openai-responses` wire injects the former
// but not the latter, so we mirror the codex wire's strict behavior here:
// set `instructions` and strip system/developer entries from `input[]`.
if (piModel.api === 'openai-responses' && piContext.systemPrompt) {
const systemPrompt = piContext.systemPrompt;
piOpts.onPayload = (payload) => {
const params = payload as {
instructions?: string;
input?: Array<{ role?: string }>;
};
params.instructions = systemPrompt;
if (Array.isArray(params.input)) {
params.input = params.input.filter(
(entry) => entry.role !== 'system' && entry.role !== 'developer',
);
}
return params;
};
}

// sub2api / claude2api gateways 403 requests without claude-cli identity
// headers. pi-ai only injects those on OAuth tokens — paste a
// sub2api-issued key and you hit the plain API-key branch. Force the
Expand All @@ -280,7 +306,7 @@ export async function complete(
}

validateCodexImageInputs(opts);
const result = await pi.completeSimple(piModel, toPiContext(messages, piModel, opts), piOpts);
const result = await pi.completeSimple(piModel, piContext, piOpts);

if (result.stopReason === 'error') {
throw new CodesignError(
Expand Down
Loading