From 9dd3a592beebf11528647143f467a9369c35226a Mon Sep 17 00:00:00 2001
From: Christina Holland
Date: Thu, 13 Nov 2025 11:48:01 -0800
Subject: [PATCH 1/2] Fix `generateContentStream` returning wrong inferenceSource.

---
 .changeset/gorgeous-rice-carry.md           | 5 +++++
 packages/ai/src/methods/generate-content.ts | 6 +++++-
 2 files changed, 10 insertions(+), 1 deletion(-)
 create mode 100644 .changeset/gorgeous-rice-carry.md

diff --git a/.changeset/gorgeous-rice-carry.md b/.changeset/gorgeous-rice-carry.md
new file mode 100644
index 00000000000..80d58cede53
--- /dev/null
+++ b/.changeset/gorgeous-rice-carry.md
@@ -0,0 +1,5 @@
+---
+'@firebase/ai': patch
+---
+
+Fix `generateContentStream` returning wrong `inferenceSource`.
diff --git a/packages/ai/src/methods/generate-content.ts b/packages/ai/src/methods/generate-content.ts
index fc6eac15c74..359b6f9b7ac 100644
--- a/packages/ai/src/methods/generate-content.ts
+++ b/packages/ai/src/methods/generate-content.ts
@@ -70,7 +70,11 @@ export async function generateContentStream(
     () =>
       generateContentStreamOnCloud(apiSettings, model, params, requestOptions)
   );
-  return processStream(callResult.response, apiSettings); // TODO: Map streaming responses
+  return processStream(
+    callResult.response,
+    apiSettings,
+    callResult.inferenceSource
+  ); // TODO: Map streaming responses
 }
 
 async function generateContentOnCloud(

From aa73fa536d9866cbc2d178f239c1081b70c2876e Mon Sep 17 00:00:00 2001
From: Christina Holland
Date: Thu, 13 Nov 2025 11:56:50 -0800
Subject: [PATCH 2/2] add tests

---
 .../ai/src/methods/generate-content.test.ts | 31 +++++++++++++++++--
 1 file changed, 29 insertions(+), 2 deletions(-)

diff --git a/packages/ai/src/methods/generate-content.test.ts b/packages/ai/src/methods/generate-content.test.ts
index 8a274c24417..be94a7b29a0 100644
--- a/packages/ai/src/methods/generate-content.test.ts
+++ b/packages/ai/src/methods/generate-content.test.ts
@@ -26,6 +26,7 @@ import {
 import * as request from '../requests/request';
 import {
   generateContent,
+  generateContentStream,
   templateGenerateContent,
   templateGenerateContentStream
 } from './generate-content';
@@ -35,6 +36,7 @@ import {
   HarmBlockMethod,
   HarmBlockThreshold,
   HarmCategory,
+  InferenceSource,
   Language,
   Outcome
 } from '../types';
@@ -548,8 +550,7 @@ describe('generateContent()', () => {
       );
     });
   });
-  // TODO: define a similar test for generateContentStream
-  it('on-device', async () => {
+  it('generateContent on-device', async () => {
     const chromeAdapter = fakeChromeAdapter;
     const isAvailableStub = stub(chromeAdapter, 'isAvailable').resolves(true);
     const mockResponse = getMockResponse(
@@ -566,9 +567,35 @@ describe('generateContent()', () => {
       chromeAdapter
     );
     expect(result.response.text()).to.include('Mountain View, California');
+    expect(result.response.inferenceSource).to.equal(InferenceSource.ON_DEVICE);
     expect(isAvailableStub).to.be.called;
     expect(generateContentStub).to.be.calledWith(fakeRequestParams);
   });
+  it('generateContentStream on-device', async () => {
+    const chromeAdapter = fakeChromeAdapter;
+    const isAvailableStub = stub(chromeAdapter, 'isAvailable').resolves(true);
+    const mockResponse = getMockResponseStreaming(
+      'vertexAI',
+      'streaming-success-basic-reply-short.txt'
+    );
+    const generateContentStreamStub = stub(
+      chromeAdapter,
+      'generateContentStream'
+    ).resolves(mockResponse as Response);
+    const result = await generateContentStream(
+      fakeApiSettings,
+      'model',
+      fakeRequestParams,
+      chromeAdapter
+    );
+    const aggregatedResponse = await result.response;
+    expect(aggregatedResponse.text()).to.include('Cheyenne');
+    expect(aggregatedResponse.inferenceSource).to.equal(
+      InferenceSource.ON_DEVICE
+    );
+    expect(isAvailableStub).to.be.called;
+    expect(generateContentStreamStub).to.be.calledWith(fakeRequestParams);
+  });
 });
 
 describe('templateGenerateContent', () => {