diff --git a/extensions/copilot/src/platform/endpoint/node/responsesApi.ts b/extensions/copilot/src/platform/endpoint/node/responsesApi.ts
index 1cb9602d145e9..631866a49f3c5 100644
--- a/extensions/copilot/src/platform/endpoint/node/responsesApi.ts
+++ b/extensions/copilot/src/platform/endpoint/node/responsesApi.ts
@@ -841,6 +841,8 @@ export class OpenAIResponsesProcessor {
 	private sawCompactionMessage = false;
 	private latestCompactionOutputIndex: number | undefined;
 	private latestCompactionItem: OpenAIContextManagementResponse | undefined;
+	/** Tracks the output_index of the last text delta to detect output item boundaries */
+	private lastTextDeltaOutputIndex: number | undefined;
 	/** Maps output_index to { name, callId, arguments } for streaming tool call updates */
 	private readonly toolCallInfo = new Map();
 
@@ -915,6 +917,12 @@
 					return onProgress({ text: '', copilotErrors: [{ agent: 'openai', code: chunk.code || 'unknown', message: chunk.message, type: 'error', identifier: chunk.param || undefined }] });
 				case 'response.output_text.delta': {
 					const capiChunk: CapiResponsesTextDeltaEvent = chunk;
+					// When text arrives from a new output item, emit a paragraph
+					// separator so that e.g. commentary and final text don't fuse.
+					if (this.lastTextDeltaOutputIndex !== undefined && capiChunk.output_index !== this.lastTextDeltaOutputIndex) {
+						onProgress({ text: '\n\n' });
+					}
+					this.lastTextDeltaOutputIndex = capiChunk.output_index;
 					const haystack = new Lazy(() => new TextEncoder().encode(capiChunk.delta));
 					return onProgress({
 						text: capiChunk.delta,
diff --git a/extensions/copilot/src/platform/endpoint/node/test/responsesApi.spec.ts b/extensions/copilot/src/platform/endpoint/node/test/responsesApi.spec.ts
index cca95b8a8bc67..6b7811105a63b 100644
--- a/extensions/copilot/src/platform/endpoint/node/test/responsesApi.spec.ts
+++ b/extensions/copilot/src/platform/endpoint/node/test/responsesApi.spec.ts
@@ -1184,3 +1184,107 @@ describe('summarizedAtRoundId and stateful marker interaction', () => {
 		services.dispose();
 	});
 });
+
+describe('phase commentary followed by phase final_answer', () => {
+	it('inserts a separator between commentary and final_answer text in the stream', async () => {
+		const services = createPlatformServices();
+		const accessor = services.createTestingAccessor();
+		const instantiationService = accessor.get(IInstantiationService);
+		const logService = accessor.get(ILogService);
+		const telemetryService = new SpyingTelemetryService();
+		const accumulatedTexts: string[] = [];
+		const phases: string[] = [];
+
+		const commentaryText = 'Responding directly in commentary as requested. My name is GitHub Copilot.';
+		const finalText = 'My name is GitHub Copilot.';
+
+		// Real-world Responses API stream: commentary message (output_index 0)
+		// followed by final_answer message (output_index 1), with incremental
+		// text deltas for each.
+		const events = [
+			{ type: 'response.output_item.added', output_index: 0, item: { type: 'message', role: 'assistant', content: [], phase: 'commentary', status: 'in_progress' }, sequence_number: 2 },
+			{ type: 'response.content_part.added', output_index: 0, content_index: 0, item_id: 'item-0', part: { type: 'output_text', text: '', annotations: [], logprobs: [] }, sequence_number: 3 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'Respond', logprobs: [], sequence_number: 4 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'ing', logprobs: [], sequence_number: 5 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' directly', logprobs: [], sequence_number: 6 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' in', logprobs: [], sequence_number: 7 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' commentary', logprobs: [], sequence_number: 8 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' as', logprobs: [], sequence_number: 9 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' requested', logprobs: [], sequence_number: 10 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: '.', logprobs: [], sequence_number: 11 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' My', logprobs: [], sequence_number: 12 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' name', logprobs: [], sequence_number: 13 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' is', logprobs: [], sequence_number: 14 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' Git', logprobs: [], sequence_number: 15 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'Hub', logprobs: [], sequence_number: 16 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' Cop', logprobs: [], sequence_number: 17 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'ilot', logprobs: [], sequence_number: 18 },
+			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: '.', logprobs: [], sequence_number: 19 },
+			{ type: 'response.output_text.done', output_index: 0, content_index: 0, item_id: 'item-0', text: commentaryText, logprobs: [], sequence_number: 20 },
+			{ type: 'response.content_part.done', output_index: 0, content_index: 0, item_id: 'item-0', part: { type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }, sequence_number: 21 },
+			{ type: 'response.output_item.done', output_index: 0, item: { type: 'message', role: 'assistant', content: [{ type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }], phase: 'commentary', status: 'completed' }, sequence_number: 22 },
+			{ type: 'response.output_item.added', output_index: 1, item: { type: 'message', role: 'assistant', content: [], phase: 'final_answer', status: 'in_progress' }, sequence_number: 23 },
+			{ type: 'response.content_part.added', output_index: 1, content_index: 0, item_id: 'item-1', part: { type: 'output_text', text: '', annotations: [], logprobs: [] }, sequence_number: 24 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'My', logprobs: [], sequence_number: 25 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' name', logprobs: [], sequence_number: 26 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' is', logprobs: [], sequence_number: 27 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' Git', logprobs: [], sequence_number: 28 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'Hub', logprobs: [], sequence_number: 29 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' Cop', logprobs: [], sequence_number: 30 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'ilot', logprobs: [], sequence_number: 31 },
+			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: '.', logprobs: [], sequence_number: 32 },
+			{ type: 'response.output_text.done', output_index: 1, content_index: 0, item_id: 'item-1', text: finalText, logprobs: [], sequence_number: 33 },
+			{ type: 'response.content_part.done', output_index: 1, content_index: 0, item_id: 'item-1', part: { type: 'output_text', text: finalText, annotations: [], logprobs: [] }, sequence_number: 34 },
+			{ type: 'response.output_item.done', output_index: 1, item: { type: 'message', role: 'assistant', content: [{ type: 'output_text', text: finalText, annotations: [], logprobs: [] }], phase: 'final_answer', status: 'completed' }, sequence_number: 35 },
+			{
+				type: 'response.completed',
+				response: {
+					id: 'resp_phase_test',
+					model: 'gpt-5.4-2026-03-05',
+					created_at: 1776962259,
+					usage: { input_tokens: 8432, output_tokens: 35, total_tokens: 8467, input_tokens_details: { cached_tokens: 0 }, output_tokens_details: { reasoning_tokens: 0 } },
+					output: [
+						{ type: 'message', content: [{ type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }], phase: 'commentary', role: 'assistant', status: 'completed' },
+						{ type: 'message', content: [{ type: 'output_text', text: finalText, annotations: [], logprobs: [] }], phase: 'final_answer', role: 'assistant', status: 'completed' },
+					],
+				},
+				sequence_number: 36,
+			}
+		];
+
+		const sseBody = events.map(e => `data: ${JSON.stringify(e)}\n\n`).join('');
+		const response = createFakeStreamResponse(sseBody);
+		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-phase-test' }, {});
+
+		const stream = await processResponseFromChatEndpoint(
+			instantiationService,
+			telemetryService,
+			logService,
+			response,
+			1,
+			async (text, _unused, delta) => {
+				accumulatedTexts.push(text);
+				if (delta.phase) {
+					phases.push(delta.phase);
+				}
+				return undefined;
+			},
+			telemetryData,
+		);
+
+		for await (const _ of stream) {
+			// consume stream
+		}
+
+		expect(phases).toEqual(['commentary', 'final_answer']);
+
+		// The accumulated text must separate commentary and final_answer text
+		const finalAccumulatedText = accumulatedTexts[accumulatedTexts.length - 1];
+		expect(finalAccumulatedText).toBe(
+			commentaryText + '\n\n' + finalText
+		);
+
+		accessor.dispose();
+		services.dispose();
+	});
+});