From 62e28a7eafd6edab5dd11946a28e43ea2b7c2964 Mon Sep 17 00:00:00 2001
From: Michael Bleigh
Date: Sun, 12 May 2024 23:58:31 -0700
Subject: [PATCH 1/4] Fixes issue with context not passing through generate()

---
 js/ai/src/generate.ts                  |  1 +
 js/ai/src/model/middleware.ts          |  4 ++--
 js/pnpm-lock.yaml                      |  3 +++
 js/samples/flow-simple-ai/package.json |  1 +
 .../prompts/dotpromptContext.prompt    | 17 ++++++++++++++++
 js/samples/flow-simple-ai/src/index.ts | 20 +++++++++++++++++++
 6 files changed, 44 insertions(+), 2 deletions(-)
 create mode 100644 js/samples/flow-simple-ai/prompts/dotpromptContext.prompt

diff --git a/js/ai/src/generate.ts b/js/ai/src/generate.ts
index 53523106a6..065624bb85 100755
--- a/js/ai/src/generate.ts
+++ b/js/ai/src/generate.ts
@@ -410,6 +410,7 @@ export async function toGenerateRequest(
     messages,
     candidates: options.candidates,
     config: options.config,
+    context: options.context,
     tools: tools?.map((tool) => toToolDefinition(tool)) || [],
     output: {
       format:
diff --git a/js/ai/src/model/middleware.ts b/js/ai/src/model/middleware.ts
index f15c794568..d62adb220d 100644
--- a/js/ai/src/model/middleware.ts
+++ b/js/ai/src/model/middleware.ts
@@ -198,13 +198,13 @@ const CONTEXT_ITEM_TEMPLATE = (
   out += d.text() + '\n';
   return out;
 };
+
 export function augmentWithContext(
   options?: AugmentWithContextOptions
 ): ModelMiddleware {
   const preface =
-    typeof options?.preface === 'undefined' ? CONTEXT_PREFACE : options.preface;
+    typeof options?.preface === 'undefined' ? CONTEXT_PREFACE : options.preface;
   const itemTemplate = options?.itemTemplate || CONTEXT_ITEM_TEMPLATE;
-  const citationKey = options?.citationKey;
   return (req, next) => {
     // if there is no context in the request, no-op
     if (!req.context?.length) return next(req);
diff --git a/js/pnpm-lock.yaml b/js/pnpm-lock.yaml
index a8d204cfa2..c9da7305c3 100644
--- a/js/pnpm-lock.yaml
+++ b/js/pnpm-lock.yaml
@@ -989,6 +989,9 @@ importers:
       '@genkit-ai/core':
         specifier: workspace:^
         version: link:../../core
+      '@genkit-ai/dotprompt':
+        specifier: workspace:*
+        version: link:../../plugins/dotprompt
       '@genkit-ai/firebase':
         specifier: workspace:*
         version: link:../../plugins/firebase
diff --git a/js/samples/flow-simple-ai/package.json b/js/samples/flow-simple-ai/package.json
index dda05338cf..76780cf991 100644
--- a/js/samples/flow-simple-ai/package.json
+++ b/js/samples/flow-simple-ai/package.json
@@ -22,6 +22,7 @@
     "@genkit-ai/google-cloud": "workspace:*",
     "@genkit-ai/googleai": "workspace:*",
     "@genkit-ai/vertexai": "workspace:*",
+    "@genkit-ai/dotprompt": "workspace:*",
     "@opentelemetry/sdk-trace-base": "^1.22.0",
     "firebase-admin": "^12.1.0",
     "zod": "^3.22.4"
diff --git a/js/samples/flow-simple-ai/prompts/dotpromptContext.prompt b/js/samples/flow-simple-ai/prompts/dotpromptContext.prompt
new file mode 100644
index 0000000000..809541f194
--- /dev/null
+++ b/js/samples/flow-simple-ai/prompts/dotpromptContext.prompt
@@ -0,0 +1,17 @@
+---
+model: vertexai/gemini-1.0-pro
+input:
+  schema:
+    question: string
+output:
+  format: json
+  schema:
+    answer: string, the answer to the question
+    id: string, the selected id of the saying
+    reasoning: string, why the saying applies to the question
+---
+
+You are a mystic wisdom bot designed to help people with their problems. Use the provided
+sayings to answer the question.
+
+Question: {{question}}
\ No newline at end of file
diff --git a/js/samples/flow-simple-ai/src/index.ts b/js/samples/flow-simple-ai/src/index.ts
index 97461ce57c..7559d9f2a0 100644
--- a/js/samples/flow-simple-ai/src/index.ts
+++ b/js/samples/flow-simple-ai/src/index.ts
@@ -15,8 +15,10 @@
  */
 
 import { generate, generateStream, retrieve } from '@genkit-ai/ai';
+import { defineRetriever } from '@genkit-ai/ai/retriever';
 import { defineTool } from '@genkit-ai/ai/tool';
 import { configureGenkit } from '@genkit-ai/core';
+import { dotprompt, prompt } from '@genkit-ai/dotprompt';
 import { defineFirestoreRetriever, firebase } from '@genkit-ai/firebase';
 import { defineFlow, run } from '@genkit-ai/flow';
 import { googleCloud } from '@genkit-ai/google-cloud';
@@ -48,6 +50,7 @@ configureGenkit({
         metricExportIntervalMillis: 5_000,
       },
     }),
+    dotprompt(),
   ],
   flowStateStore: 'firebase',
   traceStore: 'firebase',
@@ -230,3 +233,20 @@ Available Options:\n- ${docs.map((d) => `${d.metadata!.name}: ${d.text()}`).join
     return result.text();
   }
 );
+
+export const dotpromptContext = defineFlow({
+  name: 'dotpromptContext',
+  inputSchema: z.string(),
+  outputSchema: z.object({answer: z.string(), id: z.string(), reasoning: z.string()}),
+}, async (question: string) => {
+  const docs = [
+    {content: [{text: 'an apple a day keeps the doctor away'}], metadata: {id: 'apple'}},
+    {content: [{text: 'those who live in glass houses should not throw stones'}],metadata: {id: 'stone'}},
+    {content: [{text: "if you don't have anything nice to say, don't say anything at all"}], metadata: {id: 'nice'}},
+  ];
+
+  const result = await (await prompt('dotpromptContext')).generate({
+    input: {question: question}, context: docs
+  });
+  return result.output() as any;
+})
\ No newline at end of file

From a63d2dfed3b448f8844ecc3d3da20c37d72e9105 Mon Sep 17 00:00:00 2001
From: Michael Bleigh
Date: Mon, 13 May 2024 00:01:40 -0700
Subject: [PATCH 2/4] format

---
 js/ai/src/model/middleware.ts          |  2 +-
 js/samples/flow-simple-ai/src/index.ts | 58 ++++++++++++++++++--------
 2 files changed, 42 insertions(+), 18 deletions(-)

diff --git a/js/ai/src/model/middleware.ts b/js/ai/src/model/middleware.ts
index d62adb220d..b1edb369a6 100644
--- a/js/ai/src/model/middleware.ts
+++ b/js/ai/src/model/middleware.ts
@@ -203,7 +203,7 @@ export function augmentWithContext(
   options?: AugmentWithContextOptions
 ): ModelMiddleware {
   const preface =
-    typeof options?.preface === 'undefined' ? CONTEXT_PREFACE : options.preface;
+    typeof options?.preface === 'undefined' ? CONTEXT_PREFACE : options.preface;
   const itemTemplate = options?.itemTemplate || CONTEXT_ITEM_TEMPLATE;
   return (req, next) => {
     // if there is no context in the request, no-op
diff --git a/js/samples/flow-simple-ai/src/index.ts b/js/samples/flow-simple-ai/src/index.ts
index 7559d9f2a0..9f0bcea7ee 100644
--- a/js/samples/flow-simple-ai/src/index.ts
+++ b/js/samples/flow-simple-ai/src/index.ts
@@ -15,7 +15,6 @@
  */
 
 import { generate, generateStream, retrieve } from '@genkit-ai/ai';
-import { defineRetriever } from '@genkit-ai/ai/retriever';
 import { defineTool } from '@genkit-ai/ai/tool';
 import { configureGenkit } from '@genkit-ai/core';
 import { dotprompt, prompt } from '@genkit-ai/dotprompt';
 import { defineFirestoreRetriever, firebase } from '@genkit-ai/firebase';
 import { defineFlow, run } from '@genkit-ai/flow';
@@ -234,19 +233,44 @@ Available Options:\n- ${docs.map((d) => `${d.metadata!.name}: ${d.text()}`).join
   }
 );
 
-export const dotpromptContext = defineFlow({
-  name: 'dotpromptContext',
-  inputSchema: z.string(),
-  outputSchema: z.object({answer: z.string(), id: z.string(), reasoning: z.string()}),
-}, async (question: string) => {
-  const docs = [
-    {content: [{text: 'an apple a day keeps the doctor away'}], metadata: {id: 'apple'}},
-    {content: [{text: 'those who live in glass houses should not throw stones'}],metadata: {id: 'stone'}},
-    {content: [{text: "if you don't have anything nice to say, don't say anything at all"}], metadata: {id: 'nice'}},
-  ];
-
-  const result = await (await prompt('dotpromptContext')).generate({
-    input: {question: question}, context: docs
-  });
-  return result.output() as any;
-})
\ No newline at end of file
+export const dotpromptContext = defineFlow(
+  {
+    name: 'dotpromptContext',
+    inputSchema: z.string(),
+    outputSchema: z.object({
+      answer: z.string(),
+      id: z.string(),
+      reasoning: z.string(),
+    }),
+  },
+  async (question: string) => {
+    const docs = [
+      {
+        content: [{ text: 'an apple a day keeps the doctor away' }],
+        metadata: { id: 'apple' },
+      },
+      {
+        content: [
+          { text: 'those who live in glass houses should not throw stones' },
+        ],
+        metadata: { id: 'stone' },
+      },
+      {
+        content: [
+          {
+            text: "if you don't have anything nice to say, don't say anything at all",
+          },
+        ],
+        metadata: { id: 'nice' },
+      },
+    ];
+
+    const result = await (
+      await prompt('dotpromptContext')
+    ).generate({
+      input: { question: question },
+      context: docs,
+    });
+    return result.output() as any;
+  }
+);

From b8ca543a4cf210a5fbdc9d90ae3113aaed37751b Mon Sep 17 00:00:00 2001
From: Michael Bleigh
Date: Mon, 13 May 2024 00:13:41 -0700
Subject: [PATCH 3/4] test

---
 js/ai/tests/generate/generate_test.ts | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/js/ai/tests/generate/generate_test.ts b/js/ai/tests/generate/generate_test.ts
index 430a8c4489..8334e8632a 100644
--- a/js/ai/tests/generate/generate_test.ts
+++ b/js/ai/tests/generate/generate_test.ts
@@ -260,6 +260,7 @@ describe('toGenerateRequest', () => {
         ],
         candidates: undefined,
         config: undefined,
+        context: undefined,
         tools: [],
         output: { format: 'text' },
       },
@@ -278,6 +279,7 @@ describe('toGenerateRequest', () => {
         ],
         candidates: undefined,
         config: undefined,
+        context: undefined,
         tools: [
           {
             name: 'tellAFunnyJoke',
@@ -313,6 +315,7 @@ describe('toGenerateRequest', () => {
         ],
         candidates: undefined,
         config: undefined,
+        context: undefined,
         tools: [
           {
             name: 'tellAFunnyJoke',
@@ -365,6 +368,7 @@ describe('toGenerateRequest', () => {
         ],
         candidates: undefined,
         config: undefined,
+        context: undefined,
         tools: [],
         output: { format: 'text' },
       },
@@ -387,10 +391,29 @@ describe('toGenerateRequest', () => {
         ],
        candidates: undefined,
        config: undefined,
+        context: undefined,
        tools: [],
        output: { format: 'text' },
      },
    },
+    {
+      should: 'pass context through to the model',
+      prompt: {
+        model: 'vertexai/gemini-1.0-pro',
+        prompt: 'Tell a joke with context.',
+        context: [{content: [{text: 'context here'}]}],
+      },
+      expectedOutput: {
+        messages: [
+          { content: [{ text: 'Tell a joke with context.' }], role: 'user' },
+        ],
+        candidates: undefined,
+        config: undefined,
+        context: [{content: [{text: 'context here'}]}],
+        tools: [],
+        output: { format: 'text' },
+      }
+    },
   ];
   for (const test of testCases) {
     it(test.should, async () => {

From c34187f728f1e1b648c9b91037536e63c7c28428 Mon Sep 17 00:00:00 2001
From: Michael Bleigh
Date: Mon, 13 May 2024 00:23:01 -0700
Subject: [PATCH 4/4] format

---
 js/ai/tests/generate/generate_test.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/js/ai/tests/generate/generate_test.ts b/js/ai/tests/generate/generate_test.ts
index 8334e8632a..adb3edb6d6 100644
--- a/js/ai/tests/generate/generate_test.ts
+++ b/js/ai/tests/generate/generate_test.ts
@@ -401,7 +401,7 @@ describe('toGenerateRequest', () => {
       prompt: {
         model: 'vertexai/gemini-1.0-pro',
         prompt: 'Tell a joke with context.',
-        context: [{content: [{text: 'context here'}]}],
+        context: [{ content: [{ text: 'context here' }] }],
       },
       expectedOutput: {
         messages: [
@@ -409,10 +409,10 @@ describe('toGenerateRequest', () => {
         ],
         candidates: undefined,
         config: undefined,
-        context: [{content: [{text: 'context here'}]}],
+        context: [{ content: [{ text: 'context here' }] }],
        tools: [],
        output: { format: 'text' },
-      }
+      },
    },
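
For reference, the snippet below is not part of the patch series; it is a minimal sketch of how a caller might exercise the context pass-through that PATCH 1/4 fixes, assuming the generate() options and document shape used elsewhere in this series. The helper name, model string, and document text are illustrative placeholders.

import { generate } from '@genkit-ai/ai';

async function askWithContext(question: string) {
  // With the one-line fix in js/ai/src/generate.ts, the `context` documents
  // passed here are copied onto the outgoing GenerateRequest, so middleware
  // such as augmentWithContext() can see and render them.
  const response = await generate({
    model: 'vertexai/gemini-1.0-pro', // placeholder; any configured model
    prompt: question,
    context: [
      {
        content: [{ text: 'an apple a day keeps the doctor away' }],
        metadata: { id: 'apple' },
      },
    ],
  });
  return response.text();
}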