From 27dd9cf2c3a8573e880450fdd839726615ea9a41 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Wed, 1 Nov 2023 11:13:05 -0700
Subject: [PATCH 1/8] Added documentation for few shot prompting

---
 .../prompts/prompt_templates/few_shot.mdx | 157 ++++++++++++++++++
 1 file changed, 157 insertions(+)
 create mode 100644 docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx

diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
new file mode 100644
index 00000000000..98db8ba95c6
--- /dev/null
+++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
@@ -0,0 +1,157 @@
# Few Shot Chat Message Prompt Templates

Few shot prompting is a prompting technique which provides the Large Language Model (LLM) with a list of examples, and then asks the LLM to generate some text based on the examples provided.

An example of this is the following:

Say you want your LLM to respond in a specific format. You can few shot prompt the LLM with a list of question-answer pairs so it knows what format to respond in.

```txt
Respond to the user's question with the following format:

Question: What is your name?
Answer: My name is John.

Question: What is your age?
Answer: I am 25 years old.

Question: What is your favorite color?
Answer:
```

Here we left the last `Answer:` blank so the LLM can fill it in. The LLM might then generate something like the following:

```txt
Answer: I don't have a favorite color; I don't have preferences.
```

### Use Case

In the following example, we few shot prompt the LLM to rephrase questions into more general queries.

We provide two examples, each pairing a specific question with its rephrased, more general counterpart. The `FewShotChatMessagePromptTemplate` will hold our examples, and when `.format` is called, those examples are formatted into a string we can pass to the LLM.

```typescript
import {
  ChatPromptTemplate,
  FewShotChatMessagePromptTemplate,
} from "langchain/prompts";
```

```typescript
const examples = [
  {
    input: "Could the members of The Police perform lawful arrests?",
    output: "what can the members of The Police do?",
  },
  {
    input: "Jan Sindel's was born in what country?",
    output: "what is Jan Sindel's personal history?",
  },
];
const examplePrompt = ChatPromptTemplate.fromMessages([
  ["human", "{input}"],
  ["ai", "{output}"],
]);
const fewShotPrompt = new FewShotChatMessagePromptTemplate({
  examplePrompt,
  examples,
  inputVariables: [], // no input variables
});
```

```typescript
const formattedPrompt = await fewShotPrompt.format({});
console.log(formattedPrompt);
```

```txt
Could the members of The Police perform lawful arrests?

what can the members of The Police do?

Jan Sindel's was born in what country?

what is Jan Sindel's personal history?
```

Then, if we use this with another question, the LLM will rephrase the question how we want. 
```typescript
import { ChatOpenAI } from "langchain/chat_models/openai";
```

```typescript
const model = new ChatOpenAI({});

const prompt = ChatPromptTemplate.fromMessages([
  [
    "ai",
    `Rephrase the users query to be more general, using the following examples:
{few_shot_examples}
User query: {input}`,
  ],
]);

const response = await prompt.pipe(model).invoke({
  input: "What's France's main city?",
  few_shot_examples: formattedPrompt,
});

console.log(response);
```

```txt
AIMessage {
  lc_namespace: [ 'langchain', 'schema' ],
  content: 'What is the capital of France?',
  additional_kwargs: { function_call: undefined }
}
```

### Few Shotting With Functions

You can also partial a prompt with a function. This is useful when you have a variable that should always be fetched in a common way. A prime example is date or time: imagine a prompt that should always contain the current date. You can't hard code the date into the prompt, and passing it along with the other input variables is tedious. In this case, it's very handy to partial the prompt with a function that always returns the current date.

```typescript
import { PromptTemplate } from "langchain/prompts";

const getCurrentDate = () => {
  return new Date().toISOString();
};

// Partialing works on any prompt template; since this template is a single
// string (not a list of examples), we use a plain `PromptTemplate` here.
const prompt = PromptTemplate.fromTemplate(
  "Tell me a {adjective} joke about the day {date}"
);

const partialPrompt = await prompt.partial({
  date: getCurrentDate,
});

const formattedPrompt = await partialPrompt.format({
  adjective: "funny",
});

console.log(formattedPrompt);

// Tell me a funny joke about the day 2023-07-13T00:54:59.287Z
```

You can also just initialize the prompt with the partialed variables:

```typescript
const prompt = new PromptTemplate({
  template: "Tell me a {adjective} joke about the day {date}",
  inputVariables: ["adjective"],
  partialVariables: {
    date: getCurrentDate,
  },
});

const formattedPrompt = await prompt.format({
  adjective: "funny",
});

console.log(formattedPrompt);

// Tell me a funny joke about the day 2023-07-13T00:54:59.287Z
```
\ No newline at end of file

From cb8391d01aa2ecd2770163fa2a9a26625773b245 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Wed, 1 Nov 2023 11:35:29 -0700
Subject: [PATCH 2/8] added non chat model docs

---
 .../prompts/prompt_templates/few_shot.mdx | 93 ++++++++++++++++++-
 1 file changed, 92 insertions(+), 1 deletion(-)

diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
index 98db8ba95c6..f4d904a0f5e 100644
--- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
+++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
@@ -1,4 +1,4 @@
-# Few Shot Chat Message Prompt Templates
# Few Shot Prompt Templates

Few shot prompting is a prompting technique which provides the Large Language Model (LLM) with a list of examples, and then asks the LLM to generate some text based on the examples provided.

@@ -154,4 +154,95 @@ const formattedPrompt = await prompt.format({
console.log(formattedPrompt);

// Tell me a funny joke about the day 2023-07-13T00:54:59.287Z
```

## With Non Chat Models

LangChain also provides a class for few shot prompt formatting for non chat models: `FewShotPromptTemplate`. 
The API is largely the same, but the output is formatted (chat messages vs strings)

### Partial Formatting

```typescript
import {
  PromptTemplate,
  FewShotPromptTemplate,
} from "langchain/prompts";
```

```typescript
const examplePrompt = PromptTemplate.fromTemplate("{foo}{bar}");
const prompt = new FewShotPromptTemplate({
  prefix: "{foo}{bar}",
  examplePrompt,
  inputVariables: ["foo"],
  partialVariables: { bar: "baz" }, // Automatically set bar to baz
});
const formatted = await prompt.format({ foo: "foo" });
console.log(formatted);
```

```txt
foobaz\n
```

### With Functions

```typescript
import {
  PromptTemplate,
  FewShotPromptTemplate,
} from "langchain/prompts";
```

```typescript
const examplePrompt = PromptTemplate.fromTemplate("{foo}{bar}");
const prompt = new FewShotPromptTemplate({
  prefix: "{foo}{bar}",
  examplePrompt,
  inputVariables: ["foo", "bar"],
});
const partialPrompt = await prompt.partial({
  foo: () => Promise.resolve("boo"),
});
const formatted = await partialPrompt.format({ bar: "baz" });
console.log(formatted);
```

```txt
boobaz\n
```

### With Functions and Example Selector

```typescript
import {
  PromptTemplate,
  FewShotPromptTemplate,
  LengthBasedExampleSelector,
} from "langchain/prompts";
```

```typescript
const examplePrompt = PromptTemplate.fromTemplate("An example about {x}");
const exampleSelector = await LengthBasedExampleSelector.fromExamples(
  [{ x: "foo" }, { x: "bar" }],
  { examplePrompt, maxLength: 200 }
);
const prompt = new FewShotPromptTemplate({
  prefix: "{foo}{bar}",
  exampleSelector,
  examplePrompt,
  inputVariables: ["foo", "bar"],
});
const partialPrompt = await prompt.partial({
  foo: () => Promise.resolve("boo"),
});
const formatted = await partialPrompt.format({ bar: "baz" });
console.log(formatted);
```

```txt
boobaz
An example about foo
An example about bar
```
\ No newline at end of file

From 4076f9e6d14b32a3c424017feabf31797ebf4838 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Wed, 1 Nov 2023 11:38:03 -0700
Subject: [PATCH 3/8] chore: lint files

---
 .../modules/model_io/prompts/prompt_templates/few_shot.mdx | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
index f4d904a0f5e..68e91c09c3a 100644
--- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
+++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
@@ -98,7 +98,7 @@ const response = await prompt.pipe(model).invoke({
  few_shot_examples: formattedPrompt,
});

-console.log(response); 
console.log(response);
```

```txt
@@ -160,7 +160,6 @@ console.log(formattedPrompt);

## With Non Chat Models

LangChain also provides a class for few shot prompt formatting for non chat models: `FewShotPromptTemplate`. 
The API is largely the same, but the output is formatted (chat messages vs strings)
-

### Partial Formatting

From eedb9fdabb0dcba9b4b1a1941d8c1fd359b8ed6b Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Wed, 1 Nov 2023 12:05:16 -0700
Subject: [PATCH 4/8] docs nit

---
 .../modules/model_io/prompts/prompt_templates/few_shot.mdx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
index 68e91c09c3a..4fddb163001 100644
--- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
+++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
@@ -1,6 +1,6 @@
# Few Shot Prompt Templates

-Few shot prompting is a prompting technique which provides the Large Language Model (LLM) with a list of examples, and then asks the LLM to generate some text based on the examples provided.
Few shot prompting is a prompting technique which provides the Large Language Model (LLM) with a list of examples, and then asks the LLM to generate some text following the lead of the examples provided.

An example of this is the following:

@@ -158,7 +158,7 @@ console.log(formattedPrompt);

## With Non Chat Models

-LangChain also provides a class for few shot prompt formatting for non chat models: `FewShotPromptTemplate`. The API is largely the same, but the output is formatted (chat messages vs strings)
LangChain also provides a class for few shot prompt formatting for non chat models: `FewShotPromptTemplate`. The API is largely the same, but the output is formatted differently (chat messages vs strings).

### Partial Formatting

From 018c5167eb36210e852a0548974fcddaccdedbd4 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Wed, 1 Nov 2023 15:44:28 -0700
Subject: [PATCH 5/8] add distinction between chat and non chat

---
 .../prompts/prompt_templates/few_shot.mdx     | 112 ++++++++++++------
 1 file changed, 77 insertions(+), 35 deletions(-)

diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
index 4fddb163001..1e32040b75f 100644
--- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
+++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx

### Few Shot vs Chat Few Shot

The chat and non chat few shot prompt templates act in a similar way. The example below demonstrates both, and the differences in their outputs. 
```typescript
import {
  PromptTemplate,
  ChatPromptTemplate,
  FewShotPromptTemplate,
  FewShotChatMessagePromptTemplate,
} from "langchain/prompts";
```

```typescript
const examples = [
  {
    input: "Could the members of The Police perform lawful arrests?",
    output: "what can the members of The Police do?",
  },
  {
    input: "Jan Sindel's was born in what country?",
    output: "what is Jan Sindel's personal history?",
  },
];
const examplePrompt = PromptTemplate.fromTemplate(`Human: {input}
AI: {output}`);
const exampleChatPrompt = ChatPromptTemplate.fromMessages([
  ["human", "{input}"],
  ["ai", "{output}"],
]);
const chatFewShotPrompt = new FewShotChatMessagePromptTemplate({
  examplePrompt: exampleChatPrompt,
  examples,
  inputVariables: [], // no input variables
});
const fewShotPrompt = new FewShotPromptTemplate({
  examplePrompt,
  examples,
  inputVariables: [], // no input variables
});
```

```typescript
console.log("Chat Few Shot: ", await chatFewShotPrompt.formatMessages({}));
/**
Chat Few Shot: [
  HumanMessage {
    lc_namespace: [ 'langchain', 'schema' ],
    content: 'Could the members of The Police perform lawful arrests?',
    additional_kwargs: {}
  },
  AIMessage {
    lc_namespace: [ 'langchain', 'schema' ],
    content: 'what can the members of The Police do?',
    additional_kwargs: {}
  },
  HumanMessage {
    lc_namespace: [ 'langchain', 'schema' ],
    content: "Jan Sindel's was born in what country?",
    additional_kwargs: {}
  },
  AIMessage {
    lc_namespace: [ 'langchain', 'schema' ],
    content: "what is Jan Sindel's personal history?",
    additional_kwargs: {}
  }
]
 */
```

```typescript
console.log("Few Shot: ", await fewShotPrompt.formatPromptValue({}));
/**
Few Shot:

Human: Could the members of The Police perform lawful arrests?
AI: what can the members of The Police do?

Human: Jan Sindel's was born in what country?
AI: what is Jan Sindel's personal history?
 */
```

Here we can see the main distinctions between `FewShotChatMessagePromptTemplate` and `FewShotPromptTemplate`: their input and output types.

`FewShotChatMessagePromptTemplate` works by taking in a `ChatPromptTemplate` for formatting its examples, and its output is a list of `BaseMessage` instances.

On the other hand, `FewShotPromptTemplate` works by taking in a `PromptTemplate` for formatting its examples, and its output is a string.

## With Non Chat Models

LangChain also provides a class for few shot prompt formatting for non chat models: `FewShotPromptTemplate`. 
+ +### Partials With Functions ```typescript import { From 52693494479e14e8312e8fc93cc8166ae7db8476 Mon Sep 17 00:00:00 2001 From: Brace Sproul Date: Wed, 1 Nov 2023 16:21:03 -0700 Subject: [PATCH 6/8] use fromTemplate --- .../prompts/prompt_templates/few_shot.mdx | 71 ++++++++----------- 1 file changed, 31 insertions(+), 40 deletions(-) diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx index 1e32040b75f..d455b517aae 100644 --- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx +++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx @@ -49,10 +49,8 @@ const examples = [ output: "what is Jan Sindel's personal history?", }, ]; -const examplePrompt = ChatPromptTemplate.fromMessages([ - ["human", "{input}"], - ["ai", "{output}"], -]); +const examplePrompt = ChatPromptTemplate.fromTemplate(`Human: {input} +AI: {output}`); const fewShotPrompt = new FewShotChatMessagePromptTemplate({ examplePrompt, examples, @@ -65,14 +63,21 @@ const formattedPrompt = await fewShotPrompt.format({}); console.log(formattedPrompt); ``` -```txt -Could the members of The Police perform lawful arrests? - -what can the members of The Police do? - -Jan Sindel's was born in what country? - -what is Jan Sindel's personal history? +```typescript +[ + HumanMessage { + lc_namespace: [ 'langchain', 'schema' ], + content: 'Human: Could the members of The Police perform lawful arrests?\n' + + 'AI: what can the members of The Police do?', + additional_kwargs: {} + }, + HumanMessage { + lc_namespace: [ 'langchain', 'schema' ], + content: "Human: Jan Sindel's was born in what country?\n" + + "AI: what is Jan Sindel's personal history?", + additional_kwargs: {} + } +] ``` Then, if we use this with another question, the LLM will rephrase the question how we want. 
@@ -84,14 +89,10 @@ import { ChatOpenAI } from "langchain/chat_models/openai"; ```typescript const model = new ChatOpenAI({}); -const prompt = ChatPromptTemplate.fromMessages([ - [ - "ai", - `Rephrase the users query to be more general, using the following examples: +const prompt = + ChatPromptTemplate.fromTemplate(`Rephrase the users query to be more general, using the following examples: {few_shot_examples} -User query: {input}`, - ], -]); +User query: {input}`); const response = await prompt.pipe(model).invoke({ input: "What's France's main city?", @@ -101,7 +102,7 @@ const response = await prompt.pipe(model).invoke({ console.log(response); ``` -```txt +```typescript AIMessage { lc_namespace: [ 'langchain', 'schema' ], content: 'What is the capital of France?', @@ -158,19 +159,17 @@ const examples = [ output: "what is Jan Sindel's personal history?", }, ]; -const examplePrompt = PromptTemplate.fromTemplate(`Human: {input} -AI: {output}`); -const exampleChatPrompt = ChatPromptTemplate.fromMessages([ - ["human", "{input}"], - ["ai", "{output}"], -]); +const prompt = `Human: {input} +AI: {output}`; +const examplePromptTemplate = PromptTemplate.fromTemplate(prompt); +const exampleChatPromptTemplate = ChatPromptTemplate.fromTemplate(prompt); const chatFewShotPrompt = new FewShotChatMessagePromptTemplate({ - examplePrompt: exampleChatPrompt, + examplePrompt: exampleChatPromptTemplate, examples, inputVariables: [], // no input variables }); const fewShotPrompt = new FewShotPromptTemplate({ - examplePrompt, + examplePrompt: examplePromptTemplate, examples, inputVariables: [], // no input variables }); @@ -182,22 +181,14 @@ console.log("Chat Few Shot: ", await chatFewShotPrompt.formatMessages({})); Chat Few Shot: [ HumanMessage { lc_namespace: [ 'langchain', 'schema' ], - content: 'Could the members of The Police perform lawful arrests?', - additional_kwargs: {} - }, - AIMessage { - lc_namespace: [ 'langchain', 'schema' ], - content: 'what can the members of The Police do?', + content: 'Human: Could the members of The Police perform lawful arrests?\n' + + 'AI: what can the members of The Police do?', additional_kwargs: {} }, HumanMessage { lc_namespace: [ 'langchain', 'schema' ], - content: "Jan Sindel's was born in what country?", - additional_kwargs: {} - }, - AIMessage { - lc_namespace: [ 'langchain', 'schema' ], - content: "what is Jan Sindel's personal history?", + content: "Human: Jan Sindel's was born in what country?\n" + + "AI: what is Jan Sindel's personal history?", additional_kwargs: {} } ] From 1abd42f8b4119f1a21f274eabe319dd9933e5ee2 Mon Sep 17 00:00:00 2001 From: Brace Sproul Date: Fri, 3 Nov 2023 09:38:01 -0700 Subject: [PATCH 7/8] updated example --- .../prompts/prompt_templates/few_shot.mdx | 30 ++++++++++++++----- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx index d455b517aae..69f2c554a4f 100644 --- a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx +++ b/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx @@ -88,17 +88,31 @@ import { ChatOpenAI } from "langchain/chat_models/openai"; ```typescript const model = new ChatOpenAI({}); - -const prompt = - ChatPromptTemplate.fromTemplate(`Rephrase the users query to be more general, using the following examples: -{few_shot_examples} -User query: {input}`); - -const response = await prompt.pipe(model).invoke({ +const examples = [ + { + input: "Could the 
members of The Police perform lawful arrests?", + output: "what can the members of The Police do?", + }, + { + input: "Jan Sindel's was born in what country?", + output: "what is Jan Sindel's personal history?", + }, +]; +const examplePrompt = ChatPromptTemplate.fromTemplate(`Human: {input} +AI: {output}`); +const fewShotPrompt = new FewShotChatMessagePromptTemplate({ + prefix: + "Rephrase the users query to be more general, using the following examples", + suffix: "Human: {input}", + examplePrompt, + examples, + inputVariables: ["input"], +}); +const formattedPrompt = await fewShotPrompt.format({ input: "What's France's main city?", - few_shot_examples: formattedPrompt, }); +const response = await model.invoke(formattedPrompt); console.log(response); ``` From 42c08c816e1dff5c6432b9b0af50e36a63cfc34d Mon Sep 17 00:00:00 2001 From: Brace Sproul Date: Tue, 14 Nov 2023 17:17:09 -0800 Subject: [PATCH 8/8] fix docs location --- .../docs/modules/model_io/prompts/prompt_templates/few_shot.mdx | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/{ => core_docs}/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx (100%) diff --git a/docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx b/docs/core_docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx similarity index 100% rename from docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx rename to docs/core_docs/docs/modules/model_io/prompts/prompt_templates/few_shot.mdx
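One step the "With Non Chat Models" section never shows is actually calling a completion-style model with the formatted string. Below is a minimal sketch of that last step: it reuses the rephrasing examples from the docs above, and it assumes an `OPENAI_API_KEY` environment variable and this era's `langchain` package layout for the imports (the exact paths may differ by version). The printed output is illustrative.

```typescript
import { OpenAI } from "langchain/llms/openai";
import { PromptTemplate, FewShotPromptTemplate } from "langchain/prompts";

// Reuse the rephrasing examples from earlier in the doc.
const examples = [
  {
    input: "Could the members of The Police perform lawful arrests?",
    output: "what can the members of The Police do?",
  },
  {
    input: "Jan Sindel's was born in what country?",
    output: "what is Jan Sindel's personal history?",
  },
];

const examplePrompt = PromptTemplate.fromTemplate(`Human: {input}
AI: {output}`);

const fewShotPrompt = new FewShotPromptTemplate({
  examplePrompt,
  examples,
  prefix:
    "Rephrase the users query to be more general, using the following examples",
  suffix: "Human: {input}",
  inputVariables: ["input"],
});

// A completion-style (non chat) model: takes a string, returns a string.
const model = new OpenAI({});
const formatted = await fewShotPrompt.format({
  input: "What's France's main city?",
});
const response = await model.invoke(formatted);
console.log(response);
// e.g. "What is the capital of France?"
```

The only real difference from the chat version in [PATCH 7/8] is the model class: a completion model consumes the plain string produced by `FewShotPromptTemplate` and returns a plain string rather than an `AIMessage`.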
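As a possible follow-up to the final chat example, the separate `format` and `invoke` steps can likely be collapsed into a single chain with `.pipe`, as was done with `ChatPromptTemplate` earlier in these docs. This is a sketch that assumes `FewShotChatMessagePromptTemplate` is runnable like the other prompt templates and that an `OPENAI_API_KEY` environment variable is set:

```typescript
import { ChatOpenAI } from "langchain/chat_models/openai";
import {
  ChatPromptTemplate,
  FewShotChatMessagePromptTemplate,
} from "langchain/prompts";

const model = new ChatOpenAI({});
const examples = [
  {
    input: "Could the members of The Police perform lawful arrests?",
    output: "what can the members of The Police do?",
  },
  {
    input: "Jan Sindel's was born in what country?",
    output: "what is Jan Sindel's personal history?",
  },
];
const examplePrompt = ChatPromptTemplate.fromTemplate(`Human: {input}
AI: {output}`);
const fewShotPrompt = new FewShotChatMessagePromptTemplate({
  prefix:
    "Rephrase the users query to be more general, using the following examples",
  suffix: "Human: {input}",
  examplePrompt,
  examples,
  inputVariables: ["input"],
});

// Compose the prompt template and the model into one runnable chain,
// so formatting and invocation happen in a single call.
const chain = fewShotPrompt.pipe(model);
const response = await chain.invoke({ input: "What's France's main city?" });
console.log(response.content);
// e.g. "What is the capital of France?"
```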