From c605c4f7ea1021190bd3ee6f2ac3962c0a2cd34b Mon Sep 17 00:00:00 2001 From: Max Lord Date: Thu, 2 May 2024 12:06:34 -0400 Subject: [PATCH] Lifting definePrompt to genkit core - Moving definePrompt into ai package - Reimagining dotprompt to follow the new contract - Adding renderPrompt veneer to facilitate using prompts in code - Updating tests and samples to use defineDotprompt. - Adding new docs for prompts and updating dotprompt --- docs/dotprompt.md | 11 +- docs/prompts.md | 106 ++++++++++ js/ai/src/index.ts | 2 + js/ai/src/prompt.ts | 102 ++++++++++ js/dotprompt/src/index.ts | 34 +--- js/dotprompt/src/prompt.ts | 183 +++++------------- js/dotprompt/src/registry.ts | 27 ++- js/dotprompt/tests/prompt_test.ts | 117 +++++------ js/samples/coffee-shop/src/index.ts | 6 +- js/samples/dev-ui-gallery/src/main/prompts.ts | 6 +- js/samples/menu/src/01/prompts.ts | 35 +++- js/samples/menu/src/02/flows.ts | 21 +- js/samples/menu/src/02/prompts.ts | 11 +- js/samples/menu/src/03/prompts.ts | 4 +- js/samples/menu/src/04/prompts.ts | 4 +- js/samples/menu/src/05/prompts.ts | 6 +- js/samples/menu/src/index.ts | 2 +- js/samples/prompt-file/prompts/story.prompt | 6 +- js/samples/prompt-file/src/index.ts | 54 +++--- js/samples/rag/src/prompt.ts | 4 +- 20 files changed, 422 insertions(+), 319 deletions(-) create mode 100644 docs/prompts.md create mode 100644 js/ai/src/prompt.ts diff --git a/docs/dotprompt.md b/docs/dotprompt.md index 5d91128bf2..4d914e80df 100644 --- a/docs/dotprompt.md +++ b/docs/dotprompt.md @@ -3,11 +3,6 @@ Firebase Genkit provides the Dotprompt library and text format to help you write and organize your generative AI prompts. -Prompt manipulation is the primary way that you, as an app developer, influence -the output of generative AI models. For example, when using LLMs, you can craft -prompts that influence the tone, format, length, and other characteristics of -the models’ responses. - Dotprompt is designed around the premise that _prompts are code_. You write and maintain your prompts in specially-formatted files called dotprompt files, track changes to them using the same version control system that you use for your @@ -278,7 +273,7 @@ are a few other ways to load and define prompts: - `loadPromptFile`: Load a prompt from a file in the prompt directory. - `loadPromptUrl`: Load a prompt from a URL. -- `definePrompt`: Define a prompt in code. +- `defineDotprompt`: Define a prompt in code. Examples: @@ -286,7 +281,7 @@ Examples: import { loadPromptFile, loadPromptUrl, - definePrompt, + defineDotprompt, } from '@genkit-ai/dotprompt'; import { z } from 'zod'; @@ -297,7 +292,7 @@ const myPrompt = await loadPromptFile('./path/to/my_prompt.prompt'); const myPrompt = await loadPromptUrl('https://example.com/my_prompt.prompt'); // Define a prompt in code -const myPrompt = definePrompt( +const myPrompt = defineDotprompt( { model: 'vertexai/gemini-1.0-pro', input: { diff --git a/docs/prompts.md b/docs/prompts.md new file mode 100644 index 0000000000..c193afd0be --- /dev/null +++ b/docs/prompts.md @@ -0,0 +1,106 @@ +# Prompts + +Prompt manipulation is the primary way that you, as an app developer, influence +the output of generative AI models. For example, when using LLMs, you can craft +prompts that influence the tone, format, length, and other characteristics of +the models’ responses. + +Genkit is designed around the premise that _prompts are code_. 
You write and
+maintain your prompts in source files, track changes to them using the same version
+control system that you use for your code, and you deploy them along with the code
+that calls your generative AI models.
+
+Most developers will find that the included [Dotprompt](./dotprompt.md) library
+meets their needs for working with prompts in Genkit. However, you can also
+work with prompts directly.
+
+## Defining prompts
+
+Genkit's `generate()` helper function accepts string prompts, and you can
+call models this way for straightforward use cases.
+
+```ts
+import { generate } from '@genkit-ai/ai';
+
+generate({
+  model: 'googleai/gemini-pro',
+  prompt: 'You are a helpful AI assistant named Walt.',
+});
+```
+
+In most cases, you will need to include some customer-provided inputs in your prompt.
+You can define a function that renders them into the prompt text, like this:
+
+```ts
+function helloPrompt(name: string) {
+  return `You are a helpful AI assistant named Walt. Say hello to ${name}.`;
+}
+
+generate({
+  model: 'googleai/gemini-pro',
+  prompt: helloPrompt('Fred'),
+});
+```
+
+One shortcoming of defining prompts in your code is that testing requires executing
+them as part of a flow. To facilitate more rapid iteration, Genkit provides a way
+to define your prompts and run them directly in the Developer UI.
+
+Use the `definePrompt` function to register your prompts with Genkit.
+
+```ts
+import { definePrompt } from '@genkit-ai/ai';
+import z from 'zod';
+
+export const helloPrompt = definePrompt(
+  {
+    name: 'helloPrompt',
+    inputSchema: z.object({ name: z.string() }),
+  },
+  async (input) => {
+    const promptText = `You are a helpful AI assistant named Walt.
+    Say hello to ${input.name}.`;
+
+    return {
+      messages: [{ role: 'user', content: [{ text: promptText }] }],
+      config: { temperature: 0.3 },
+    };
+  }
+);
+```
+
+A prompt action defines a function that returns a `GenerateRequest` object
+which can be used with any model. Optionally, you can also define an input schema
+for the prompt, which is analogous to the input schema for a flow.
+Prompts can also define any of the common model configuration options, such as
+temperature or the number of output tokens.
+
+You can use this prompt in your code with the `renderPrompt()` helper function.
+Provide the input variables expected by the prompt and the model to call.
+
+```ts
+import { generate, renderPrompt } from '@genkit-ai/ai';
+
+generate(
+  renderPrompt({
+    prompt: helloPrompt,
+    input: { name: 'Fred' },
+    model: 'googleai/gemini-pro',
+  })
+);
+```
+
+In the Genkit Developer UI, you can run any prompt you have defined in this way.
+This allows you to experiment with individual prompts outside the scope of
+the flows in which they might be used.
+
+## Dotprompt
+
+Genkit includes the [Dotprompt](./dotprompt.md) library, which adds additional
+functionality to prompts:
+
+- Loading prompts from `.prompt` source files
+- Handlebars-based templates
+- Support for multi-turn prompt templates and multimedia content
+- Concise input and output schema definitions
+- Fluent usage with `generate()`
diff --git a/js/ai/src/index.ts b/js/ai/src/index.ts
index 4b87c2abe6..06df9efec3 100644
--- a/js/ai/src/index.ts
+++ b/js/ai/src/index.ts
@@ -30,7 +30,9 @@ export {
   Message,
   generate,
   generateStream,
+  toGenerateRequest,
 } from './generate.js';
+export { PromptAction, definePrompt, renderPrompt } from './prompt.js';
 export {
   IndexerAction,
   IndexerInfo,
diff --git a/js/ai/src/prompt.ts b/js/ai/src/prompt.ts
new file mode 100644
index 0000000000..2262d84897
--- /dev/null
+++ b/js/ai/src/prompt.ts
@@ -0,0 +1,102 @@
+/**
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Action, action, JSONSchema7 } from '@genkit-ai/core';
+import { lookupAction, registerAction } from '@genkit-ai/core/registry';
+import { setCustomMetadataAttributes } from '@genkit-ai/core/tracing';
+import z from 'zod';
+import { GenerateOptions } from './generate';
+import { GenerateRequest, GenerateRequestSchema, ModelArgument } from './model';
+
+export type PromptFn<I extends z.ZodTypeAny = z.ZodTypeAny> = (
+  input: z.infer<I>
+) => Promise<GenerateRequest>;
+
+export type PromptAction<I extends z.ZodTypeAny = z.ZodTypeAny> = Action<
+  I,
+  typeof GenerateRequestSchema
+> & {
+  __action: {
+    metadata: {
+      type: 'prompt';
+    };
+  };
+};
+
+export function definePrompt<I extends z.ZodTypeAny>(
+  {
+    name,
+    description,
+    inputSchema,
+    inputJsonSchema,
+    metadata,
+  }: {
+    name: string;
+    description?: string;
+    inputSchema?: I;
+    inputJsonSchema?: JSONSchema7;
+    metadata?: Record<string, any>;
+  },
+  fn: PromptFn<I>
+): PromptAction<I> {
+  const a = action(
+    {
+      name,
+      description,
+      inputSchema,
+      inputJsonSchema,
+      metadata: { ...(metadata || { prompt: {} }), type: 'prompt' },
+    },
+    (i: I): Promise<GenerateRequest> => {
+      setCustomMetadataAttributes({ subtype: 'prompt' });
+      return fn(i);
+    }
+  );
+  registerAction('prompt', name, a);
+  return a as PromptAction<I>;
+}
+
+/**
+ * A veneer for rendering a prompt action to GenerateOptions.
+ */
+
+export type PromptArgument<I extends z.ZodTypeAny = z.ZodTypeAny> =
+  | string
+  | PromptAction<I>;
+
+export async function renderPrompt<
+  I extends z.ZodTypeAny = z.ZodTypeAny,
+  CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,
+>(params: {
+  prompt: PromptArgument<I>;
+  input: z.infer<I>;
+  model: ModelArgument<CustomOptions>;
+  config?: z.infer<CustomOptions>;
+}): Promise<GenerateOptions> {
+  let prompt: PromptAction<I>;
+  if (typeof params.prompt === 'string') {
+    prompt = await lookupAction(`/prompt/${params.prompt}`);
+  } else {
+    prompt = params.prompt as PromptAction<I>;
+  }
+  const rendered = await prompt(params.input);
+  return {
+    model: params.model,
+    config: { ...(rendered.config || {}), ...params.config },
+    history: rendered.messages.slice(0, rendered.messages.length - 1),
+    prompt: rendered.messages[rendered.messages.length - 1].content,
+  };
+}
diff --git a/js/dotprompt/src/index.ts b/js/dotprompt/src/index.ts
index b66df4b883..36aafb6c39 100644
--- a/js/dotprompt/src/index.ts
+++ b/js/dotprompt/src/index.ts
@@ -17,18 +17,13 @@
 
 import { readFileSync } from 'fs';
 import { basename } from 'path';
-import z from 'zod';
-
-import { registerAction } from '@genkit-ai/core/registry';
-
-import { PromptMetadata } from './metadata.js';
-import { Prompt, PromptAction, PromptGenerateOptions } from './prompt.js';
+import { defineDotprompt, Dotprompt } from './prompt.js';
 import { lookupPrompt } from './registry.js';
 
-export { Prompt, PromptAction, PromptGenerateOptions };
+export { defineDotprompt, Dotprompt };
 
-export function loadPromptFile(path: string): Prompt {
-  return Prompt.parse(
+export function loadPromptFile(path: string): Dotprompt {
+  return Dotprompt.parse(
     basename(path).split('.')[0],
     readFileSync(path, 'utf-8')
   );
@@ -37,29 +32,16 @@ export function loadPromptFile(path: string): Prompt {
 export async function loadPromptUrl(
   name: string,
   url: string
-): Promise<Prompt> {
+): Promise<Dotprompt> {
   const fetch = (await import('node-fetch')).default;
   const response = await fetch(url);
   const text = await response.text();
-  return Prompt.parse(name, text);
+  return Dotprompt.parse(name, text);
 }
 
 export async function prompt<Variables = unknown>(
   name: string,
   options?: { variant?: string }
-): Promise<Prompt<Variables>> {
-  return (await lookupPrompt(name, options?.variant)) as Prompt<Variables>;
-}
-
-export function definePrompt<V extends z.ZodTypeAny = z.ZodTypeAny>(
-  options: PromptMetadata<V>,
-  template: string
-): Prompt<z.infer<V>> {
-  const prompt = new Prompt(options, template);
-  registerAction(
-    'prompt',
-    `${prompt.name}${prompt.variant ?
`.${prompt.variant}` : ''}`, - prompt.action() - ); - return prompt; +): Promise> { + return (await lookupPrompt(name, options?.variant)) as Dotprompt; } diff --git a/js/dotprompt/src/prompt.ts b/js/dotprompt/src/prompt.ts index ae62682dbc..ba27ce479b 100644 --- a/js/dotprompt/src/prompt.ts +++ b/js/dotprompt/src/prompt.ts @@ -15,23 +15,18 @@ */ import { + definePrompt, generate, GenerateOptions, GenerateResponse, generateStream, GenerateStreamResponse, + PromptAction, + toGenerateRequest, } from '@genkit-ai/ai'; -import { - GenerateRequest, - GenerateRequestSchema, - GenerateResponseSchema, - GenerationCommonConfigSchema, - MessageData, -} from '@genkit-ai/ai/model'; -import { resolveTools, toToolDefinition } from '@genkit-ai/ai/tool'; -import { Action, action, GenkitError } from '@genkit-ai/core'; -import { JSONSchema, parseSchema, toJsonSchema } from '@genkit-ai/core/schema'; -import { setCustomMetadataAttributes } from '@genkit-ai/core/tracing'; +import { GenerationCommonConfigSchema, MessageData } from '@genkit-ai/ai/model'; +import { GenkitError } from '@genkit-ai/core'; +import { parseSchema } from '@genkit-ai/core/schema'; import { createHash } from 'crypto'; import fm, { FrontMatterResult } from 'front-matter'; import z from 'zod'; @@ -43,23 +38,7 @@ import { } from './metadata.js'; import { compile } from './template.js'; -const PromptActionInputSchema = GenerateRequestSchema.omit({ - messages: true, - tools: true, - output: true, -}).extend({ - model: z.string().optional(), - input: z.unknown().optional(), - tools: z.array(z.any()).optional(), - output: z - .object({ - format: z.enum(['json', 'text', 'media']).optional(), - schema: z.any().optional(), - jsonSchema: z.any().optional(), - }) - .optional(), -}); -export type PromptActionInput = z.infer; +export type PromptData = PromptFrontmatter & { template: string }; export type PromptGenerateOptions = Omit< GenerateOptions, @@ -69,18 +48,7 @@ export type PromptGenerateOptions = Omit< input?: V; }; -export type PromptData = PromptFrontmatter & { template: string }; - -export type PromptAction = Action< - typeof PromptActionInputSchema, - typeof GenerateResponseSchema, - Record & { - type: 'prompt'; - prompt: PromptData; - } ->; - -export class Prompt implements PromptMetadata { +export class Dotprompt implements PromptMetadata { name?: string; variant?: string; hash: string; @@ -95,12 +63,7 @@ export class Prompt implements PromptMetadata { config?: PromptMetadata['config']; candidates?: PromptMetadata['candidates']; - private _render: ( - input: Variables, - options?: { context?: string[]; history?: MessageData[] } - ) => MessageData[]; - - private _action?: PromptAction; + private _render: (input: Variables) => MessageData[]; static parse(name: string, source: string) { try { @@ -108,20 +71,20 @@ export class Prompt implements PromptMetadata { allowUnsafe: false, }) as FrontMatterResult; - return new Prompt( + return new Dotprompt( { ...toMetadata(fmResult.attributes), name } as PromptMetadata, fmResult.body ); } catch (e: any) { throw new GenkitError({ - source: 'dotprompt', + source: 'Dotprompt', status: 'INVALID_ARGUMENT', message: `Error parsing YAML frontmatter of '${name}' prompt: ${e.message}`, }); } } - static fromAction(action: PromptAction): Prompt { + static fromAction(action: PromptAction): Dotprompt { const { template, ...options } = action.__action.metadata!.prompt; const pm = options as PromptMetadata; if (pm.input?.schema) { @@ -131,8 +94,7 @@ export class Prompt implements PromptMetadata { if 
(pm.output?.schema) { pm.output.jsonSchema = options.output?.schema; } - const prompt = new Prompt(options as PromptMetadata, template); - prompt._action = action; + const prompt = new Dotprompt(options as PromptMetadata, template); return prompt; } @@ -140,7 +102,7 @@ export class Prompt implements PromptMetadata { this.name = options.name; this.variant = options.variant; this.model = options.model; - this.input = options.input; + this.input = options.input || { schema: z.any() }; this.output = options.output; this.tools = options.tools; this.config = options.config; @@ -174,25 +136,25 @@ export class Prompt implements PromptMetadata { return this._render({ ...this.input?.default, ...input }); } - async render( - options: PromptGenerateOptions - ): Promise { - const messages = this.renderMessages(options.input); - return { - config: this.config || {}, - messages, - output: this.output - ? { - format: this.output?.format, - schema: toJsonSchema({ - schema: this.output?.schema, - jsonSchema: this.output?.jsonSchema, - }), - } - : {}, - tools: (await resolveTools(this.tools)).map(toToolDefinition), - candidates: options.candidates || 1, - }; + toJSON(): PromptData { + return { ...toFrontmatter(this), template: this.template }; + } + + define(): void { + definePrompt( + { + name: `${this.name}${this.variant ? `.${this.variant}` : ''}`, + description: 'Defined by Dotprompt', + inputSchema: this.input?.schema, + inputJsonSchema: this.input?.jsonSchema, + metadata: { + type: 'prompt', + prompt: this.toJSON(), + }, + }, + async (input: Variables) => + toGenerateRequest(this.render({ input: input })) + ); } private _generateOptions( @@ -200,7 +162,7 @@ export class Prompt implements PromptMetadata { ): GenerateOptions { if (!options.model && !this.model) { throw new GenkitError({ - source: 'dotprompt', + source: 'Dotprompt', message: 'Must supply `model` in prompt metadata or generate options.', status: 'INVALID_ARGUMENT', }); @@ -218,79 +180,32 @@ export class Prompt implements PromptMetadata { schema: options.output?.schema || this.output?.schema, jsonSchema: options.output?.jsonSchema || this.output?.jsonSchema, }, - tools: this.tools?.concat(options.tools || []) || [], - streamingCallback: options.streamingCallback, + tools: (options.tools || []).concat(this.tools || []), }; } - async _generate(req: PromptGenerateOptions) { - return generate(this._generateOptions(req)); + render(opt: PromptGenerateOptions): GenerateOptions { + return this._generateOptions(opt); } async generate( - options: PromptGenerateOptions + opt: PromptGenerateOptions ): Promise { - const req = { ...options, tools: await resolveTools(options.tools) }; - const rendered = await this.render(options); // TODO: don't re-render to do this - if (options.candidates == 0) { - return new GenerateResponse({ candidates: [] }, rendered); - } else { - return new GenerateResponse(await this.action()(req), rendered); - } + return generate(this.render(opt)); } async generateStream( - options: PromptGenerateOptions + opt: PromptGenerateOptions ): Promise { - // TODO: properly wrap this in appropriate telemetry - return generateStream(this._generateOptions(options)); + return generateStream(this.render(opt)); } +} - toJSON(): PromptData { - return { ...toFrontmatter(this), template: this.template }; - } - - action(): PromptAction { - if (this._action) return this._action; - - this._action = action( - { - name: `${this.name}${this.variant ? 
`.${this.variant}` : ''}`, - inputSchema: PromptActionInputSchema, - outputSchema: GenerateResponseSchema, - metadata: { - type: 'prompt', - prompt: this.toJSON(), - }, - }, - (args) => { - setCustomMetadataAttributes({ subtype: 'prompt' }); - args.output = args.output - ? { - format: args.output?.format, - jsonSchema: toJsonSchema({ - schema: args.output.schema as z.ZodTypeAny, - jsonSchema: args.output.jsonSchema as JSONSchema, - }), - } - : undefined; - return this._generate(args as PromptGenerateOptions); - } - ) as PromptAction; - const actionJsonSchema = toJsonSchema({ - schema: PromptActionInputSchema.omit({ input: true }), - }); - if (this.input?.jsonSchema) { - // Prompt file case - (actionJsonSchema as any).properties.input = this.input.jsonSchema; - this._action.__action.inputJsonSchema = actionJsonSchema; - } else if (this.input?.schema) { - // definePrompt case - (actionJsonSchema as any).properties.input = toJsonSchema({ - schema: this.input.schema, - }); - this._action.__action.inputJsonSchema = actionJsonSchema; - } - return this._action; - } +export function defineDotprompt( + options: PromptMetadata, + template: string +): Dotprompt> { + const prompt = new Dotprompt(options, template); + prompt.define(); + return prompt; } diff --git a/js/dotprompt/src/registry.ts b/js/dotprompt/src/registry.ts index 67af3aec8f..48b7378a36 100644 --- a/js/dotprompt/src/registry.ts +++ b/js/dotprompt/src/registry.ts @@ -14,32 +14,31 @@ * limitations under the License. */ +import { PromptAction } from '@genkit-ai/ai'; import { config, GenkitError } from '@genkit-ai/core'; import { logger } from '@genkit-ai/core/logging'; -import { lookupAction, registerAction } from '@genkit-ai/core/registry'; +import { lookupAction } from '@genkit-ai/core/registry'; import { existsSync, readFileSync } from 'fs'; import { join } from 'path'; -import { Prompt, PromptAction } from './prompt.js'; +import { Dotprompt } from './prompt.js'; export async function lookupPrompt( name: string, variant?: string -): Promise { +): Promise { const registryPrompt = (await lookupAction( `/prompt/${name}${variant ? `.${variant}` : ''}` )) as PromptAction; - if (registryPrompt) return Prompt.fromAction(registryPrompt); - - const prompt = loadPrompt(name, variant); - registerAction( - 'prompt', - `${name}${variant ? `.${variant}` : ''}`, - prompt.action() - ); - return prompt; + if (registryPrompt) { + return Dotprompt.fromAction(registryPrompt); + } else { + const loadedPrompt = loadPrompt(name, variant); + loadedPrompt.define(); // register it + return loadedPrompt; + } } -function loadPrompt(name: string, variant?: string) { +export function loadPrompt(name: string, variant?: string) { const dir = config.options.promptDir || './prompts'; const promptExists = existsSync( join(dir, `${name}${variant ? `.${variant}` : ''}.prompt`) @@ -61,7 +60,7 @@ function loadPrompt(name: string, variant?: string) { join(dir, `${name}${variant ? 
`.${variant}` : ''}.prompt`), 'utf8' ); - const prompt = Prompt.parse(name, source); + const prompt = Dotprompt.parse(name, source); prompt.variant = variant; return prompt; } diff --git a/js/dotprompt/tests/prompt_test.ts b/js/dotprompt/tests/prompt_test.ts index 9bdc069108..e720374da5 100644 --- a/js/dotprompt/tests/prompt_test.ts +++ b/js/dotprompt/tests/prompt_test.ts @@ -20,10 +20,8 @@ import { describe, it } from 'node:test'; import { defineModel } from '@genkit-ai/ai/model'; import z from 'zod'; -import { defineTool } from '@genkit-ai/ai'; -import { toToolDefinition } from '@genkit-ai/ai/tool'; import { toJsonSchema, ValidationError } from '@genkit-ai/core/schema'; -import { definePrompt, prompt, Prompt } from '../src/index.js'; +import { defineDotprompt, Dotprompt, prompt } from '../src/index.js'; import { PromptMetadata } from '../src/metadata.js'; const echo = defineModel( @@ -35,8 +33,8 @@ const echo = defineModel( }) ); -function testPrompt(template, options?: Partial): Prompt { - return new Prompt({ name: 'test', model: echo, ...options }, template); +function testPrompt(template, options?: Partial): Dotprompt { + return new Dotprompt({ name: 'test', model: echo, ...options }, template); } describe('Prompt', () => { @@ -45,8 +43,8 @@ describe('Prompt', () => { const prompt = testPrompt(`Hello {{name}}, how are you?`); const rendered = await prompt.render({ input: { name: 'Michael' } }); - assert.deepStrictEqual(rendered.messages, [ - { role: 'user', content: [{ text: 'Hello Michael, how are you?' }] }, + assert.deepStrictEqual(rendered.prompt, [ + { text: 'Hello Michael, how are you?' }, ]); }); @@ -56,33 +54,43 @@ describe('Prompt', () => { }); const rendered = await prompt.render({ input: {} }); - assert.deepStrictEqual(rendered.messages, [ + assert.deepStrictEqual(rendered.prompt, [ { - role: 'user', - content: [{ text: 'Hello Fellow Human, how are you?' 
}], + text: 'Hello Fellow Human, how are you?', }, ]); }); - }); - - describe('#toJSON', () => { - it('should convert zod to json schema', () => { - const schema = z.object({ name: z.string() }); - - const prompt = testPrompt(`hello {{name}}`, { - input: { schema }, - }); - assert.deepStrictEqual( - prompt.toJSON().input?.schema, - toJsonSchema({ schema }) + it('rejects input not matching the schema', async () => { + const invalidSchemaPrompt = defineDotprompt( + { + name: 'invalidInput', + model: 'echo', + input: { + jsonSchema: { + properties: { foo: { type: 'boolean' } }, + required: ['foo'], + }, + }, + }, + `You asked for {{foo}}.` ); + + await assert.rejects(async () => { + await invalidSchemaPrompt.render({ input: { foo: 'baz' } }); + }, ValidationError); }); }); describe('#generate', () => { + it('renders and calls the model', async () => { + const prompt = testPrompt(`Hello {{name}}, how are you?`); + const response = await prompt.generate({ input: { name: 'Bob' } }); + assert.equal(response.text(), `Hello Bob, how are you?`); + }); + it('rejects input not matching the schema', async () => { - const invalidSchemaPrompt = definePrompt( + const invalidSchemaPrompt = defineDotprompt( { name: 'invalidInput', model: 'echo', @@ -100,25 +108,20 @@ describe('Prompt', () => { await invalidSchemaPrompt.generate({ input: { foo: 'baz' } }); }, ValidationError); }); + }); - const tinyPrompt = definePrompt( - { - name: 'littlePrompt', - model: 'echo', - input: { schema: z.any() }, - }, - `Tiny prompt` - ); - - it('includes its request in the response', async () => { - const response = await tinyPrompt.generate({ input: {} }); - assert.notEqual(response.request, undefined); - }); + describe('#toJSON', () => { + it('should convert zod to json schema', () => { + const schema = z.object({ name: z.string() }); + + const prompt = testPrompt(`hello {{name}}`, { + input: { schema }, + }); - it('does not call the model when candidates==0', async () => { - const response = await tinyPrompt.generate({ candidates: 0, input: {} }); - assert.notEqual(response.request, undefined); - assert.equal(response.candidates.length, 0); + assert.deepStrictEqual( + prompt.toJSON().input?.schema, + toJsonSchema({ schema }) + ); }); }); @@ -126,7 +129,7 @@ describe('Prompt', () => { it('should throw a good error for invalid YAML', () => { assert.throws( () => { - Prompt.parse( + Dotprompt.parse( 'example', `--- input: { @@ -143,7 +146,7 @@ This is the rest of the prompt` }); it('should parse picoschema', () => { - const p = Prompt.parse( + const p = Dotprompt.parse( 'example', `--- input: @@ -173,7 +176,7 @@ output: describe('definePrompt', () => { it('registers a prompt and its variant', async () => { - definePrompt( + defineDotprompt( { name: 'promptName', model: 'echo', @@ -181,7 +184,7 @@ output: `This is a prompt.` ); - definePrompt( + defineDotprompt( { name: 'promptName', variant: 'variantName', @@ -199,30 +202,4 @@ output: assert.equal('And this is its variant.', variantPrompt.template); }); }); - - it('resolves its tools when generating', async () => { - const tool = defineTool( - { - name: 'testTool', - description: 'Just a test', - inputSchema: z.string(), - outputSchema: z.string(), - }, - async (input) => { - return 'result'; - } - ); - - const prompt = definePrompt( - { - name: 'promptName', - model: 'echo', - tools: [tool], - }, - `This is a prompt.` - ); - - const out = await prompt.generate({ input: 'test' }); - assert.deepEqual(out.request?.tools, [toToolDefinition(tool)]); - }); }); diff --git 
a/js/samples/coffee-shop/src/index.ts b/js/samples/coffee-shop/src/index.ts index 96f19ba0cc..8582fded51 100644 --- a/js/samples/coffee-shop/src/index.ts +++ b/js/samples/coffee-shop/src/index.ts @@ -15,7 +15,7 @@ */ import { initializeGenkit } from '@genkit-ai/core'; -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { defineFlow, runFlow } from '@genkit-ai/flow'; import { geminiPro } from '@genkit-ai/vertexai'; import * as z from 'zod'; @@ -32,7 +32,7 @@ const CustomerNameSchema = z.object({ customerName: z.string(), }); -const simpleGreetingPrompt = definePrompt( +const simpleGreetingPrompt = defineDotprompt( { name: 'simpleGreeting', model: geminiPro, @@ -68,7 +68,7 @@ const CustomerTimeAndHistorySchema = z.object({ previousOrder: z.string(), }); -const greetingWithHistoryPrompt = definePrompt( +const greetingWithHistoryPrompt = defineDotprompt( { name: 'greetingWithHistory', model: geminiPro, diff --git a/js/samples/dev-ui-gallery/src/main/prompts.ts b/js/samples/dev-ui-gallery/src/main/prompts.ts index 0ce0d07ffc..aaeed7a58b 100644 --- a/js/samples/dev-ui-gallery/src/main/prompts.ts +++ b/js/samples/dev-ui-gallery/src/main/prompts.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { definePrompt, prompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt, prompt } from '@genkit-ai/dotprompt'; import { defineFlow } from '@genkit-ai/flow'; import { geminiPro } from '@genkit-ai/googleai'; import * as z from 'zod'; @@ -28,7 +28,7 @@ import '../genkit.config.js'; const promptName = 'codeDefinedPrompt'; const template = 'Say hello to {{name}} in the voice of a {{persona}}.'; -export const codeDefinedPrompt = definePrompt( +export const codeDefinedPrompt = defineDotprompt( { name: promptName, model: geminiPro, @@ -72,7 +72,7 @@ export const codeDefinedPrompt = definePrompt( template ); -export const codeDefinedPromptVariant = definePrompt( +export const codeDefinedPromptVariant = defineDotprompt( { name: promptName, variant: 'jsonOutput', diff --git a/js/samples/menu/src/01/prompts.ts b/js/samples/menu/src/01/prompts.ts index 953b72c497..f632dc7c17 100644 --- a/js/samples/menu/src/01/prompts.ts +++ b/js/samples/menu/src/01/prompts.ts @@ -14,16 +14,41 @@ * limitations under the License. */ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { definePrompt } from '@genkit-ai/ai'; +import { GenerateRequest } from '@genkit-ai/ai/model'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro } from '@genkit-ai/vertexai'; -import { MenuQuestionInputSchema } from '../types'; +import { MenuQuestionInput, MenuQuestionInputSchema } from '../types'; // Define a prompt to handle a customer question about the menu. -// The daily menu is hard-coded into the prompt. +// This prompt uses definePrompt directly. -export const s01_staticMenuPrompt = definePrompt( +export const s01_vanillaPrompt = definePrompt( { - name: 's01_staticMenu', + name: 's01_vanillaPrompt', + inputSchema: MenuQuestionInputSchema, + }, + async (input: MenuQuestionInput): Promise => { + const promptText = ` + You are acting as a helpful AI assistant named "Walt" that can answer + questions about the food available on the menu at Walt's Burgers. 
+ Customer says: ${input.question} + `; + + return { + messages: [{ role: 'user', content: [{ text: promptText }] }], + config: { temperature: 0.3 }, + }; + } +); + +// Define another prompt which uses the Dotprompt library +// that also gives us a type-safe handlebars template system, +// and well-defined output schemas. + +export const s01_staticMenuDotPrompt = defineDotprompt( + { + name: 's01_staticMenuDotPrompt', model: geminiPro, input: { schema: MenuQuestionInputSchema }, output: { format: 'text' }, diff --git a/js/samples/menu/src/02/flows.ts b/js/samples/menu/src/02/flows.ts index 27cd73cba0..ef2bc5761c 100644 --- a/js/samples/menu/src/02/flows.ts +++ b/js/samples/menu/src/02/flows.ts @@ -14,16 +14,11 @@ * limitations under the License. */ -import { generate } from '@genkit-ai/ai'; import { defineFlow } from '@genkit-ai/flow'; -import { geminiPro } from '@genkit-ai/vertexai'; import { AnswerOutputSchema, MenuQuestionInputSchema } from '../types'; import { s02_dataMenuPrompt } from './prompts'; -import { menuTool } from './tools'; // Define a flow which generates a response from the prompt. -// The prompt uses a tool which will load the menu data, -// if the user asks a reasonable question about the menu. export const s02_menuQuestionFlow = defineFlow( { @@ -32,14 +27,12 @@ export const s02_menuQuestionFlow = defineFlow( outputSchema: AnswerOutputSchema, }, async (input) => { - // Note, using generate() instead of Prompt.generate() - // to work around a bug in tool usage. - return generate({ - model: geminiPro, - tools: [menuTool], // This tool includes the menu - prompt: s02_dataMenuPrompt.renderText({ question: input.question }), - }).then((response) => { - return { answer: response.text() }; - }); + return s02_dataMenuPrompt + .generate({ + input: { question: input.question }, + }) + .then((response) => { + return { answer: response.text() }; + }); } ); diff --git a/js/samples/menu/src/02/prompts.ts b/js/samples/menu/src/02/prompts.ts index 52cddb76d4..c9350bd901 100644 --- a/js/samples/menu/src/02/prompts.ts +++ b/js/samples/menu/src/02/prompts.ts @@ -14,12 +14,15 @@ * limitations under the License. */ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro } from '@genkit-ai/vertexai'; import { MenuQuestionInputSchema } from '../types'; import { menuTool } from './tools'; -export const s02_dataMenuPrompt = definePrompt( +// The prompt uses a tool which will load the menu data, +// if the user asks a reasonable question about the menu. + +export const s02_dataMenuPrompt = defineDotprompt( { name: 's02_dataMenu', model: geminiPro, @@ -28,11 +31,13 @@ export const s02_dataMenuPrompt = definePrompt( tools: [menuTool], }, ` -You are acting as a helpful AI assistant named "Walt" that can answer +You are acting as a helpful AI assistant named Walt that can answer questions about the food available on the menu at Walt's Burgers. Answer this customer's question, in a concise and helpful manner, as long as it is about food on the menu or something harmless like sports. +Use the tools available to answer menu questions. +DO NOT INVENT ITEMS NOT ON THE MENU. Question: {{question}} ? diff --git a/js/samples/menu/src/03/prompts.ts b/js/samples/menu/src/03/prompts.ts index 2b27159890..18abc13bbe 100644 --- a/js/samples/menu/src/03/prompts.ts +++ b/js/samples/menu/src/03/prompts.ts @@ -14,14 +14,14 @@ * limitations under the License. 
*/ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro } from '@genkit-ai/vertexai'; import { DataMenuQuestionInputSchema } from '../types'; // This prompt will generate two messages when rendered. // These two messages will be used to seed the exchange with the model. -export const s03_chatPreamblePrompt = definePrompt( +export const s03_chatPreamblePrompt = defineDotprompt( { name: 's03_chatPreamble', model: geminiPro, diff --git a/js/samples/menu/src/04/prompts.ts b/js/samples/menu/src/04/prompts.ts index b18bd9a397..ddb5dc28a3 100644 --- a/js/samples/menu/src/04/prompts.ts +++ b/js/samples/menu/src/04/prompts.ts @@ -14,11 +14,11 @@ * limitations under the License. */ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro } from '@genkit-ai/vertexai'; import { DataMenuQuestionInputSchema } from '../types'; -export const s04_ragDataMenuPrompt = definePrompt( +export const s04_ragDataMenuPrompt = defineDotprompt( { name: 's04_ragDataMenu', model: geminiPro, diff --git a/js/samples/menu/src/05/prompts.ts b/js/samples/menu/src/05/prompts.ts index 30c6ed0a43..cfcf10669f 100644 --- a/js/samples/menu/src/05/prompts.ts +++ b/js/samples/menu/src/05/prompts.ts @@ -14,12 +14,12 @@ * limitations under the License. */ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro, geminiProVision } from '@genkit-ai/vertexai'; import * as z from 'zod'; import { TextMenuQuestionInputSchema } from '../types'; -export const s05_readMenuPrompt = definePrompt( +export const s05_readMenuPrompt = defineDotprompt( { name: 's05_readMenu', model: geminiProVision, @@ -39,7 +39,7 @@ from the following image of a restaurant menu. ` ); -export const s05_textMenuPrompt = definePrompt( +export const s05_textMenuPrompt = defineDotprompt( { name: 's05_textMenu', model: geminiPro, diff --git a/js/samples/menu/src/index.ts b/js/samples/menu/src/index.ts index a1d15438b6..6fe41e62af 100644 --- a/js/samples/menu/src/index.ts +++ b/js/samples/menu/src/index.ts @@ -23,7 +23,7 @@ initializeGenkit(config); // Export all of the example prompts and flows // 01 -export { s01_staticMenuPrompt } from './01/prompts'; +export { s01_staticMenuDotPrompt, s01_vanillaPrompt } from './01/prompts'; // 02 export { s02_menuQuestionFlow } from './02/flows'; export { s02_dataMenuPrompt } from './02/prompts'; diff --git a/js/samples/prompt-file/prompts/story.prompt b/js/samples/prompt-file/prompts/story.prompt index b5df3101d7..3312eae50f 100644 --- a/js/samples/prompt-file/prompts/story.prompt +++ b/js/samples/prompt-file/prompts/story.prompt @@ -2,11 +2,9 @@ model: googleai/gemini-pro input: schema: - properties: - subject: {type: string} - required: [subject] + subject: string output: format: text --- -Tell me a story about {{subject}}. \ No newline at end of file +Tell me a story about {{subject}}. 
diff --git a/js/samples/prompt-file/src/index.ts b/js/samples/prompt-file/src/index.ts index d76e308da4..e681276b1c 100644 --- a/js/samples/prompt-file/src/index.ts +++ b/js/samples/prompt-file/src/index.ts @@ -19,33 +19,10 @@ import { defineFlow } from '@genkit-ai/flow'; import * as z from 'zod'; import './genkit.config'; -defineFlow( - { - name: 'tellStory', - inputSchema: z.string(), - outputSchema: z.string(), - streamSchema: z.string(), - }, - async (subject, streamingCallback) => { - const storyPrompt = await prompt('story'); - const { response, stream } = await storyPrompt.generateStream({ - input: { subject }, - }); - if (streamingCallback) { - for await (const chunk of stream()) { - streamingCallback(chunk.content[0]?.text!); - } - } - return (await response()).text(); - } -); - -prompt('story'); // This example demonstrates using prompt files in a flow - // Load the prompt file during initialization. -// If it fails, due to the file being invalid, the process will crash -// instead of us getting a weird failure later when the flow runs. +// If it fails, due to the prompt file being invalid, the process will crash, +// instead of us getting a more mysterious failure later when the flow runs. prompt('recipe').then((recipePrompt) => { defineFlow( @@ -72,3 +49,30 @@ prompt('recipe', { variant: 'robot' }).then((recipePrompt) => { async (input) => (await recipePrompt.generate({ input: input })).output() ); }); + +// A variation that supports streaming, optionally + +prompt('story').then((storyPrompt) => { + defineFlow( + { + name: 'tellStory', + inputSchema: z.string(), + outputSchema: z.string(), + streamSchema: z.string(), + }, + async (subject, streamingCallback) => { + if (streamingCallback) { + const { response, stream } = await storyPrompt.generateStream({ + input: { subject }, + }); + for await (const chunk of stream()) { + streamingCallback(chunk.content[0]?.text!); + } + return (await response()).text(); + } else { + const response = await storyPrompt.generate({ input: { subject } }); + return response.text(); + } + } + ); +}); diff --git a/js/samples/rag/src/prompt.ts b/js/samples/rag/src/prompt.ts index cf9fa2d1bc..fec34b3ab6 100644 --- a/js/samples/rag/src/prompt.ts +++ b/js/samples/rag/src/prompt.ts @@ -14,13 +14,13 @@ * limitations under the License. */ -import { definePrompt } from '@genkit-ai/dotprompt'; +import { defineDotprompt } from '@genkit-ai/dotprompt'; import { geminiPro } from '@genkit-ai/vertexai'; import * as z from 'zod'; // Define a prompt that includes the retrieved context documents -export const augmentedPrompt = definePrompt( +export const augmentedPrompt = defineDotprompt( { name: 'augmentedPrompt', model: geminiPro,