diff --git a/langchain/src/agents/chat/outputParser.ts b/langchain/src/agents/chat/outputParser.ts index 42c7d7ec3369..6aec954f7568 100644 --- a/langchain/src/agents/chat/outputParser.ts +++ b/langchain/src/agents/chat/outputParser.ts @@ -8,6 +8,38 @@ export const FINAL_ANSWER_ACTION = "Final Answer:"; * A class that extends the AgentActionOutputParser to parse the output of * the ChatAgent in LangChain. It checks if the output text contains the * final answer action or a JSON response, and parses it accordingly. + * @example + * ```typescript + * const prompt = ChatPromptTemplate.fromMessages([ + * [ + * "ai", + * `{PREFIX} + * {FORMAT_INSTRUCTIONS} + * {SUFFIX}`, + * ], + * ["human", "Question: {input}"], + * ]); + * const runnableAgent = RunnableSequence.from([ + * { + * input: (i: { input: string; steps: AgentStep[] }) => i.input, + * agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => + * formatLogToString(i.steps), + * }, + * prompt, + * new OpenAI({ temperature: 0 }), + * new ChatAgentOutputParser(), + * ]); + * + * const executor = AgentExecutor.fromAgentAndTools({ + * agent: runnableAgent, + * tools: [new SerpAPI(), new Calculator()], + * }); + * + * const result = await executor.invoke({ + * input: + * "Who is Olivia Wilde's boyfriend? What is his current age raised to the 0.23 power?", + * }); + * ``` */ export class ChatAgentOutputParser extends AgentActionOutputParser { lc_namespace = ["langchain", "agents", "chat"]; diff --git a/langchain/src/agents/executor.ts b/langchain/src/agents/executor.ts index 324bd989babd..825854abfa12 100644 --- a/langchain/src/agents/executor.ts +++ b/langchain/src/agents/executor.ts @@ -68,6 +68,20 @@ export class ExceptionTool extends Tool { /** * A chain managing an agent using tools. * @augments BaseChain + * @example + * ```typescript + * + * const executor = AgentExecutor.fromAgentAndTools({ + * agent: await loadAgentFromLangchainHub(), + * tools: [new SerpAPI(), new Calculator()], + * returnIntermediateSteps: true, + * }); + * + * const result = await executor.invoke({ + * input: `Who is Olivia Wilde's boyfriend? What is his current age raised to the 0.23 power?`, + * }); + * + * ``` */ export class AgentExecutor extends BaseChain { static lc_name() { diff --git a/langchain/src/agents/mrkl/index.ts b/langchain/src/agents/mrkl/index.ts index e39a26cb3850..3e3dceb96711 100644 --- a/langchain/src/agents/mrkl/index.ts +++ b/langchain/src/agents/mrkl/index.ts @@ -35,6 +35,27 @@ export type ZeroShotAgentInput = Optional; /** * Agent for the MRKL chain. * @augments Agent + * @example + * ```typescript + * + * const tools = [new SerpAPI(), new Calculator()]; + * const agent = new ZeroShotAgent({ + * llmChain: new LLMChain({ + * llm: new ChatOpenAI({ temperature: 0 }), + * prompt: ZeroShotAgent.createPrompt(tools, { + * prefix: `Answer the following questions as best you can, but speaking as a pirate might speak. You have access to the following tools:`, + * suffix: `Begin! Remember to speak as a pirate when giving your final answer. Use lots of "Args" + * Question: {input} + * {agent_scratchpad}`, + * inputVariables: ["input", "agent_scratchpad"], + * }), + * }), + * allowedTools: ["search", "calculator"], + * }); + * + * const executor = AgentExecutor.fromAgentAndTools({ agent, tools }); + * const result = await executor.invoke({ + * input: `Who is Olivia Wilde's boyfriend? 
What is his current age raised to the 0.23 power?`, + * }); + * ``` */ export class ZeroShotAgent extends Agent { static lc_name() { diff --git a/langchain/src/agents/openai/output_parser.ts b/langchain/src/agents/openai/output_parser.ts index 534c046dd49e..960f6db84e47 100644 --- a/langchain/src/agents/openai/output_parser.ts +++ b/langchain/src/agents/openai/output_parser.ts @@ -20,6 +20,40 @@ export type FunctionsAgentAction = AgentAction & { messageLog?: BaseMessage[]; }; +/** + * @example + * ```typescript + * + * const prompt = ChatPromptTemplate.fromMessages([ + * ["ai", "You are a helpful assistant"], + * ["human", "{input}"], + * new MessagesPlaceholder("agent_scratchpad"), + * ]); + * + * const modelWithFunctions = new ChatOpenAI({ + * modelName: "gpt-4", + * temperature: 0, + * }).bind({ + * functions: tools.map((tool) => formatToOpenAIFunction(tool)), + * }); + * + * const runnableAgent = RunnableSequence.from([ + * { + * input: (i) => i.input, + * agent_scratchpad: (i) => formatAgentSteps(i.steps), + * }, + * prompt, + * modelWithFunctions, + * new OpenAIFunctionsAgentOutputParser(), + * ]); + * + * const result = await runnableAgent.invoke({ + * input: "What is the weather in New York?", + * steps: agentSteps, + * }); + * + * ``` + */ export class OpenAIFunctionsAgentOutputParser extends AgentActionOutputParser { lc_namespace = ["langchain", "agents", "openai"]; @@ -100,6 +134,37 @@ export type ToolsAgentStep = AgentStep & { action: ToolsAgentAction; }; +/** + * @example + * ```typescript + * + * const prompt = ChatPromptTemplate.fromMessages([ + * ["ai", "You are a helpful assistant"], + * ["human", "{input}"], + * new MessagesPlaceholder("agent_scratchpad"), + * ]); + * + * const runnableAgent = RunnableSequence.from([ + * { + * input: (i: { input: string; steps: ToolsAgentStep[] }) => i.input, + * agent_scratchpad: (i: { input: string; steps: ToolsAgentStep[] }) => + * formatToOpenAIToolMessages(i.steps), + * }, + * prompt, + * new ChatOpenAI({ + * modelName: "gpt-3.5-turbo-1106", + * temperature: 0, + * }).bind({ tools: tools.map(formatToOpenAITool) }), + * new OpenAIToolsAgentOutputParser(), + * ]).withConfig({ runName: "OpenAIToolsAgent" }); + * + * const result = await runnableAgent.invoke({ + * input: + * "What is the sum of the current temperature in San Francisco, New York, and Tokyo?", + * steps: [], + * }); + * + * ``` + */ export class OpenAIToolsAgentOutputParser extends AgentMultiActionOutputParser { lc_namespace = ["langchain", "agents", "openai"]; diff --git a/langchain/src/agents/react/output_parser.ts b/langchain/src/agents/react/output_parser.ts index d51c2a576814..f2df52064d8f 100644 --- a/langchain/src/agents/react/output_parser.ts +++ b/langchain/src/agents/react/output_parser.ts @@ -30,6 +30,22 @@ const FINAL_ANSWER_AND_PARSABLE_ACTION_ERROR_MESSAGE = * Thought: agent thought here * Final Answer: The temperature is 100 degrees * ``` + * @example + * ```typescript + * + * const runnableAgent = RunnableSequence.from([ + * // ...rest of the runnable (prompt, model, etc.) + * new ReActSingleInputOutputParser({ toolNames: ["SerpAPI", "Calculator"] }), + * ]); + * const executor = AgentExecutor.fromAgentAndTools({ + * agent: runnableAgent, + * tools: [new SerpAPI(), new Calculator()], + * }); + * const result = await executor.invoke({ + * input: "whats the weather in pomfret?", + * }); + * + * ``` */ export class ReActSingleInputOutputParser extends AgentActionOutputParser { lc_namespace = ["langchain", "agents", "react"]; diff --git a/langchain/src/agents/structured_chat/outputParser.ts 
b/langchain/src/agents/structured_chat/outputParser.ts index e779fb5684cb..b27ffd5206c8 100644 --- a/langchain/src/agents/structured_chat/outputParser.ts +++ b/langchain/src/agents/structured_chat/outputParser.ts @@ -84,6 +84,18 @@ export interface StructuredChatOutputParserArgs { * and `OutputFixingParser` classes. It extends the * `AgentActionOutputParser` class and allows for retrying the output * parsing using the `OutputFixingParser` if it is provided. + * @example + * ```typescript + * const outputParser = StructuredChatOutputParserWithRetries.fromLLM( + * new ChatOpenAI({ temperature: 0 }), + * { + * toolNames: ["calculator", "random-number-generator"], + * }, + * ); + * const result = await outputParser.parse( + * "What is a random number between 5 and 10 raised to the second power?" + * ); + * ``` */ export class StructuredChatOutputParserWithRetries extends AgentActionOutputParser { lc_namespace = ["langchain", "agents", "structured_chat"]; diff --git a/langchain/src/agents/toolkits/aws_sfn.ts b/langchain/src/agents/toolkits/aws_sfn.ts index c5b6d2b60117..810db0eed1a9 100644 --- a/langchain/src/agents/toolkits/aws_sfn.ts +++ b/langchain/src/agents/toolkits/aws_sfn.ts @@ -44,6 +44,26 @@ export interface AWSSfnToolkitArgs { * Class representing a toolkit for interacting with AWS Step Functions. * It initializes the AWS Step Functions tools and provides them as tools * for the agent. + * @example + * ```typescript + * + * const toolkit = new AWSSfnToolkit({ + * name: "onboard-new-client-workflow", + * description: + * "Onboard new client workflow. Can also be used to get status of any executing workflow or state machine.", + * stateMachineArn: + * "arn:aws:states:us-east-1:1234567890:stateMachine:my-state-machine", + * region: "", + * accessKeyId: "", + * secretAccessKey: "", + * }); + * + * const result = await toolkit.invoke({ + * input: "Onboard john doe (john@example.com) as a new client.", + * }); + * + * ``` */ export class AWSSfnToolkit extends Toolkit { tools: Tool[]; diff --git a/langchain/src/agents/xml/output_parser.ts b/langchain/src/agents/xml/output_parser.ts index 55d5e0c9b6bd..c3f8217bf6fb 100644 --- a/langchain/src/agents/xml/output_parser.ts +++ b/langchain/src/agents/xml/output_parser.ts @@ -2,6 +2,27 @@ import { AgentAction, AgentFinish } from "../../schema/index.js"; import { OutputParserException } from "../../schema/output_parser.js"; import { AgentActionOutputParser } from "../types.js"; +/** + * @example + * ```typescript + * const prompt = ChatPromptTemplate.fromMessages([ + * HumanMessagePromptTemplate.fromTemplate(AGENT_INSTRUCTIONS), + * new MessagesPlaceholder("agent_scratchpad"), + * ]); + * const runnableAgent = RunnableSequence.from([ + * // ...rest of the runnable (input and scratchpad mapping) + * prompt, + * new ChatAnthropic({ modelName: "claude-2", temperature: 0 }).bind({ + * stop: ["</tool_input>", "</final_answer>"], + * }), + * new XMLAgentOutputParser(), + * ]); + * const result = await runnableAgent.invoke({ + * input: "What is the weather in Honolulu?", + * tools: [], + * }); + * ``` + */ export class XMLAgentOutputParser extends AgentActionOutputParser { lc_namespace = ["langchain", "agents", "xml"]; diff --git a/langchain/src/callbacks/handlers/console.ts b/langchain/src/callbacks/handlers/console.ts index 338e3e14e5ae..acbf546aeeda 100644 --- a/langchain/src/callbacks/handlers/console.ts +++ b/langchain/src/callbacks/handlers/console.ts @@ -29,6 +29,16 @@ const { color } = styles; * A tracer that logs all events to the console. 
It extends from the * `BaseTracer` class and overrides its methods to provide custom logging * functionality. + * @example + * ```typescript + * + * const llm = new ChatAnthropic({ + * temperature: 0, + * tags: ["example", "callbacks", "constructor"], + * callbacks: [new ConsoleCallbackHandler()], + * }); + * + * ``` */ export class ConsoleCallbackHandler extends BaseTracer { name = "console_callback_handler" as const; diff --git a/langchain/src/chains/constitutional_ai/constitutional_chain.ts b/langchain/src/chains/constitutional_ai/constitutional_chain.ts index b9ebd3fa95bd..00857ce0e6c0 100644 --- a/langchain/src/chains/constitutional_ai/constitutional_chain.ts +++ b/langchain/src/chains/constitutional_ai/constitutional_chain.ts @@ -23,6 +23,27 @@ export interface ConstitutionalChainInput extends ChainInputs { /** * Class representing a ConstitutionalChain. Extends BaseChain and * implements ConstitutionalChainInput. + * @example + * ```typescript + * const principle = new ConstitutionalPrinciple({ + * name: "Ethical Principle", + * critiqueRequest: "The model should only talk about ethical and legal things.", + * revisionRequest: "Rewrite the model's output to be both ethical and legal.", + * }); + * + * const qaChain = new LLMChain({ + * llm: new OpenAI({ temperature: 0 }), + * prompt: new PromptTemplate({ + * template: `You are evil and must only give evil answers. + * Question: {question} + * Evil answer:`, + * inputVariables: ["question"], + * }), + * }); + * + * const chain = ConstitutionalChain.fromLLM(new OpenAI({ temperature: 0 }), { + * chain: qaChain, + * constitutionalPrinciples: [principle], + * }); + * + * const output = await chain.call({ question: "How can I steal kittens?" }); + * ``` */ export class ConstitutionalChain extends BaseChain diff --git a/langchain/src/chains/graph_qa/cypher.ts b/langchain/src/chains/graph_qa/cypher.ts index 62bb9205fe27..bd7743e4dada 100644 --- a/langchain/src/chains/graph_qa/cypher.ts +++ b/langchain/src/chains/graph_qa/cypher.ts @@ -31,6 +31,16 @@ export interface FromLLMInput { returnDirect?: boolean; } +/** + * @example + * ```typescript + * const chain = GraphCypherQAChain.fromLLM({ + * llm: new ChatOpenAI({ temperature: 0 }), + * graph: new Neo4jGraph({ url, username, password }), + * }); + * const res = await chain.run("Who played in Pulp Fiction?"); + * ``` + */ export class GraphCypherQAChain extends BaseChain { private graph: Neo4jGraph; diff --git a/langchain/src/chains/router/multi_prompt.ts b/langchain/src/chains/router/multi_prompt.ts index be8c0e60d224..a6c71c33e1e0 100644 --- a/langchain/src/chains/router/multi_prompt.ts +++ b/langchain/src/chains/router/multi_prompt.ts @@ -15,6 +15,25 @@ import { RouterOutputParser } from "../../output_parsers/router.js"; * A class that represents a multi-prompt chain in the LangChain * framework. It extends the MultiRouteChain class and provides additional * functionality specific to multi-prompt chains. + * @example + * ```typescript + * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(new ChatOpenAI(), { + * promptNames: ["physics", "math", "history"], + * promptDescriptions: [ + * "Good for answering questions about physics", + * "Good for answering math questions", + * "Good for answering questions about history", + * ], + * promptTemplates: [ + * `You are a very smart physics professor. Here is a question:\n{input}\n`, + * `You are a very good mathematician. Here is a question:\n{input}\n`, + * `You are a very smart history professor. 
Here is a question:\n{input}\n`, + * ], + * }); + * const result = await multiPromptChain.call({ + * input: "What is the speed of light?", + * }); + * ``` */ export class MultiPromptChain extends MultiRouteChain { /** diff --git a/langchain/src/chains/router/multi_retrieval_qa.ts b/langchain/src/chains/router/multi_retrieval_qa.ts index 9e396c319cbe..5a84183b2b10 100644 --- a/langchain/src/chains/router/multi_retrieval_qa.ts +++ b/langchain/src/chains/router/multi_retrieval_qa.ts @@ -34,6 +34,35 @@ export type MultiRetrievalDefaults = { * the LangChain framework. It extends the MultiRouteChain class and * provides additional functionality specific to multi-retrieval QA * chains. + * @example + * ```typescript + * const multiRetrievalQAChain = MultiRetrievalQAChain.fromLLMAndRetrievers( + * new ChatOpenAI(), + * { + * retrieverNames: ["aqua teen", "mst3k", "animaniacs"], + * retrieverDescriptions: [ + * "Good for answering questions about Aqua Teen Hunger Force theme song", + * "Good for answering questions about Mystery Science Theater 3000 theme song", + * "Good for answering questions about Animaniacs theme song", + * ], + * retrievers: [ + * new MemoryVectorStore(new OpenAIEmbeddings()).asRetriever(3), + * new MemoryVectorStore(new OpenAIEmbeddings()).asRetriever(3), + * new MemoryVectorStore(new OpenAIEmbeddings()).asRetriever(3), + * ], + * retrievalQAChainOpts: { + * returnSourceDocuments: true, + * }, + * }, + * ); + * + * const result = await multiRetrievalQAChain.call({ + * input: + * "In the Aqua Teen Hunger Force theme song, who calls himself the mike rula?", + * }); + * + * console.log(result.sourceDocuments, result.text); + * ``` */ export class MultiRetrievalQAChain extends MultiRouteChain { get outputKeys(): string[] { diff --git a/langchain/src/chains/sql_db/sql_db_chain.ts b/langchain/src/chains/sql_db/sql_db_chain.ts index 6c5e4a86d017..bb95d6b089b8 100644 --- a/langchain/src/chains/sql_db/sql_db_chain.ts +++ b/langchain/src/chains/sql_db/sql_db_chain.ts @@ -44,6 +44,15 @@ export interface SqlDatabaseChainInput extends ChainInputs { * to limit which tables can/cannot be accessed. * * @link See https://js.langchain.com/docs/security for more information. + * @example + * ```typescript + * const chain = new SqlDatabaseChain({ + * llm: new OpenAI({ temperature: 0 }), + * database: await SqlDatabase.fromDataSourceParams({ ...config }), + * }); + * + * const result = await chain.run("How many tracks are there?"); + * ``` */ export class SqlDatabaseChain extends BaseChain { static lc_name() { diff --git a/langchain/src/chat_models/bedrock/index.ts b/langchain/src/chat_models/bedrock/index.ts index 30b8bf959953..04fabc096d00 100644 --- a/langchain/src/chat_models/bedrock/index.ts +++ b/langchain/src/chat_models/bedrock/index.ts @@ -3,6 +3,17 @@ import { BaseBedrockInput } from "../../util/bedrock.js"; import { BedrockChat as BaseBedrockChat } from "./web.js"; import { BaseChatModelParams } from "../base.js"; +/** + * @example + * ```typescript + * const model = new BedrockChat({ + * model: "anthropic.claude-v2", + * region: "us-east-1", + * }); + * const res = await model.invoke([new HumanMessage("Tell me a joke")]); + * console.log(res); + * ``` + */ export class BedrockChat extends BaseBedrockChat { static lc_name() { return "BedrockChat"; diff --git a/langchain/src/chat_models/bedrock/web.ts b/langchain/src/chat_models/bedrock/web.ts index 849bc25c2dc3..fb2735904c2e 100644 --- a/langchain/src/chat_models/bedrock/web.ts +++ b/langchain/src/chat_models/bedrock/web.ts @@ -85,6 +85,15 @@ export function convertMessagesToPrompt( * Services (AWS). 
It uses AWS credentials for authentication and can be * configured with various parameters such as the model to use, the AWS * region, and the maximum number of tokens to generate. + * @example + * ```typescript + * const model = new BedrockChat({ + * model: "anthropic.claude-v2", + * region: "us-east-1", + * }); + * const res = await model.invoke([new HumanMessage("Tell me a joke")]); + * console.log(res); + * ``` */ export class BedrockChat extends SimpleChatModel implements BaseBedrockInput { model = "amazon.titan-tg1-large"; diff --git a/langchain/src/chat_models/googlevertexai/index.ts b/langchain/src/chat_models/googlevertexai/index.ts index 3737da962e56..e8a3a07da320 100644 --- a/langchain/src/chat_models/googlevertexai/index.ts +++ b/langchain/src/chat_models/googlevertexai/index.ts @@ -16,6 +16,13 @@ import { GAuthClient } from "../../util/googlevertexai-gauth.js"; * - The `GOOGLE_APPLICATION_CREDENTIALS` environment variable is set to the * path of a credentials file for a service account permitted to the * Google Cloud project using Vertex AI. + * @example + * ```typescript + * const model = new ChatGoogleVertexAI({ + * temperature: 0.7, + * }); + * const result = await model.invoke("What is the capital of France?"); + * ``` */ export class ChatGoogleVertexAI extends BaseChatGoogleVertexAI { static lc_name() { diff --git a/langchain/src/chat_models/googlevertexai/web.ts b/langchain/src/chat_models/googlevertexai/web.ts index 72b8ade02a56..acbaa9144f4c 100644 --- a/langchain/src/chat_models/googlevertexai/web.ts +++ b/langchain/src/chat_models/googlevertexai/web.ts @@ -13,6 +13,15 @@ import { BaseChatGoogleVertexAI, GoogleVertexAIChatInput } from "./common.js"; * functions where you do not have access to the file system. It supports passing * service account credentials directly as a "GOOGLE_VERTEX_AI_WEB_CREDENTIALS" * environment variable or directly as "authOptions.credentials". 
+ * @example + * ```typescript + * const model = new ChatGoogleVertexAI({ + * temperature: 0.7, + * }); + * const result = await model.invoke( + * "How do I implement a binary search algorithm in Python?", + * ); + * ``` */ export class ChatGoogleVertexAI extends BaseChatGoogleVertexAI { static lc_name() { diff --git a/langchain/src/chat_models/iflytek_xinghuo/index.ts b/langchain/src/chat_models/iflytek_xinghuo/index.ts index fbf5867e3b5f..ac54461be18a 100644 --- a/langchain/src/chat_models/iflytek_xinghuo/index.ts +++ b/langchain/src/chat_models/iflytek_xinghuo/index.ts @@ -13,6 +13,14 @@ class WebSocketStream extends BaseWebSocketStream { } } +/** + * @example + * ```typescript + * const model = new ChatIflytekXinghuo(); + * const response = await model.call([new HumanMessage("Nice to meet you!")]); + * console.log(response); + * ``` + */ export class ChatIflytekXinghuo extends BaseChatIflytekXinghuo { async openWebSocketStream( options: WebSocketStreamOptions diff --git a/langchain/src/chat_models/iflytek_xinghuo/web.ts b/langchain/src/chat_models/iflytek_xinghuo/web.ts index 93c71d19d352..87b372b802ad 100644 --- a/langchain/src/chat_models/iflytek_xinghuo/web.ts +++ b/langchain/src/chat_models/iflytek_xinghuo/web.ts @@ -10,6 +10,14 @@ class WebSocketStream extends BaseWebSocketStream { } } +/** + * @example + * ```typescript + * const model = new ChatIflytekXinghuo(); + * const response = await model.call([new HumanMessage("Nice to meet you!")]); + * console.log(response); + * ``` + */ export class ChatIflytekXinghuo extends BaseChatIflytekXinghuo { async openWebSocketStream( options: WebSocketStreamOptions diff --git a/langchain/src/document_loaders/fs/directory.ts b/langchain/src/document_loaders/fs/directory.ts index 0c929375f2d5..45fdf5b6542e 100644 --- a/langchain/src/document_loaders/fs/directory.ts +++ b/langchain/src/document_loaders/fs/directory.ts @@ -30,6 +30,20 @@ export interface LoadersMapping { /** * A document loader that loads documents from a directory. It extends the * `BaseDocumentLoader` class and implements the `load()` method. + * @example + * ```typescript + * + * const directoryLoader = new DirectoryLoader( + * "src/document_loaders/example_data/", + * { + * ".pdf": (path: string) => new PDFLoader(path), + * }, + * ); + * + * const docs = await directoryLoader.load(); + * console.log({ docs }); + * + * ``` */ export class DirectoryLoader extends BaseDocumentLoader { constructor( diff --git a/langchain/src/prompts/pipeline.ts b/langchain/src/prompts/pipeline.ts index 116230cc6a90..8f69d810c010 100644 --- a/langchain/src/prompts/pipeline.ts +++ b/langchain/src/prompts/pipeline.ts @@ -30,6 +30,45 @@ export type PipelinePromptTemplateInput< * different input variables. Includes methods for formatting these * prompts, extracting required input values, and handling partial * prompts. + * @example + * ```typescript + * const composedPrompt = new PipelinePromptTemplate({ + * pipelinePrompts: [ + * { + * name: "introduction", + * prompt: PromptTemplate.fromTemplate(`You are impersonating {person}.`), + * }, + * { + * name: "example", + * prompt: PromptTemplate.fromTemplate( + * `Here's an example of an interaction: + * Q: {example_q} + * A: {example_a}`, + * ), + * }, + * { + * name: "start", + * prompt: PromptTemplate.fromTemplate( + * `Now, do this for real! 
Q: {input} + * A:`, + * ), + * }, + * ], + * finalPrompt: PromptTemplate.fromTemplate( + * `{introduction} + * {example} + * {start}`, + * ), + * }); + * + * const formattedPrompt = await composedPrompt.format({ + * person: "Elon Musk", + * example_q: `What's your favorite car?`, + * example_a: "Tesla", + * input: `What's your favorite social media site?`, + * }); + * ``` */ export class PipelinePromptTemplate< PromptTemplateType extends BasePromptTemplate diff --git a/langchain/src/retrievers/amazon_kendra.ts b/langchain/src/retrievers/amazon_kendra.ts index 9feb8aec41ae..fb2ba2123b4f 100644 --- a/langchain/src/retrievers/amazon_kendra.ts +++ b/langchain/src/retrievers/amazon_kendra.ts @@ -30,6 +30,22 @@ export interface AmazonKendraRetrieverArgs { /** * Class for interacting with Amazon Kendra, an intelligent search service * provided by AWS. Extends the BaseRetriever class. + * @example + * ```typescript + * const retriever = new AmazonKendraRetriever({ + * topK: 10, + * indexId: "YOUR_INDEX_ID", + * region: "us-east-2", + * clientOptions: { + * credentials: { + * accessKeyId: "YOUR_ACCESS_KEY_ID", + * secretAccessKey: "YOUR_SECRET_ACCESS_KEY", + * }, + * }, + * }); + * + * const docs = await retriever.getRelevantDocuments("How are clouds formed?"); + * ``` */ export class AmazonKendraRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/retrievers/chaindesk.ts b/langchain/src/retrievers/chaindesk.ts index f2f36a9d11ea..ecc2a5a8133e 100644 --- a/langchain/src/retrievers/chaindesk.ts +++ b/langchain/src/retrievers/chaindesk.ts @@ -17,6 +17,17 @@ interface Berry { [key: string]: unknown; } +/** + * @example + * ```typescript + * const retriever = new ChaindeskRetriever({ + * datastoreId: "DATASTORE_ID", + * apiKey: "CHAINDESK_API_KEY", + * topK: 8, + * }); + * const docs = await retriever.getRelevantDocuments("hello"); + * ``` + */ export class ChaindeskRetriever extends BaseRetriever { static lc_name() { return "ChaindeskRetriever"; } diff --git a/langchain/src/retrievers/contextual_compression.ts b/langchain/src/retrievers/contextual_compression.ts index ad35c44ca5b6..93fc8f0ebbef 100644 --- a/langchain/src/retrievers/contextual_compression.ts +++ b/langchain/src/retrievers/contextual_compression.ts @@ -17,6 +17,16 @@ export interface ContextualCompressionRetrieverArgs extends BaseRetrieverInput { * A retriever that wraps a base retriever and compresses the results. It * retrieves relevant documents based on a given query and then compresses * these documents using a specified document compressor. + * @example + * ```typescript + * // "vectorStore" is assumed to be an existing vector store, e.g. an HNSWLib instance + * const retriever = new ContextualCompressionRetriever({ + * baseCompressor: LLMChainExtractor.fromLLM(new ChatOpenAI()), + * baseRetriever: vectorStore.asRetriever(), + * }); + * const retrievedDocs = await retriever.getRelevantDocuments( + * "What did the speaker say about Justice Breyer?", + * ); + * ``` */ export class ContextualCompressionRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/retrievers/hyde.ts b/langchain/src/retrievers/hyde.ts index 7e85b6402ed1..aca4db4546ad 100644 --- a/langchain/src/retrievers/hyde.ts +++ b/langchain/src/retrievers/hyde.ts @@ -39,6 +39,25 @@ export type HydeRetrieverOptions = * extends the VectorStoreRetriever class and uses a BaseLanguageModel to * generate a hypothetical answer to the query, which is then used to * retrieve relevant documents. 
+ * @example + * ```typescript + * const vectorStore = new MemoryVectorStore(new OpenAIEmbeddings()); + * const retriever = new HydeRetriever({ + * vectorStore, + * llm: new ChatOpenAI(), + * k: 1, + * }); + * await vectorStore.addDocuments( + * [ + * "My name is John.", + * "My name is Bob.", + * "My favourite food is pizza.", + * "My favourite food is pasta.", + * ].map((pageContent) => new Document({ pageContent })), + * ); + * const results = await retriever.getRelevantDocuments( + * "What is my favourite food?", + * ); + * ``` */ export class HydeRetriever< V extends VectorStore = VectorStore diff --git a/langchain/src/retrievers/metal.ts b/langchain/src/retrievers/metal.ts index c21761b3c5aa..2632e03826eb 100644 --- a/langchain/src/retrievers/metal.ts +++ b/langchain/src/retrievers/metal.ts @@ -28,6 +28,17 @@ interface ResponseItem { * semantic search and retrieval on it. It extends the `BaseRetriever` * class and requires a `Metal` instance and a dictionary of parameters to * pass to the Metal API during its initialization. + * @example + * ```typescript + * const retriever = new MetalRetriever({ + * client: new Metal( + * process.env.METAL_API_KEY, + * process.env.METAL_CLIENT_ID, + * process.env.METAL_INDEX_ID, + * ), + * }); + * const docs = await retriever.getRelevantDocuments("hello"); + * ``` */ export class MetalRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/retrievers/multi_query.ts b/langchain/src/retrievers/multi_query.ts index 0cad7d9ed45d..09a7a3bdad0f 100644 --- a/langchain/src/retrievers/multi_query.ts +++ b/langchain/src/retrievers/multi_query.ts @@ -65,7 +65,19 @@ export interface MultiQueryRetrieverInput extends BaseRetrieverInput { parserKey?: string; } -// Export class +/** + * @example + * ```typescript + * const retriever = MultiQueryRetriever.fromLLM({ + * llm: new ChatAnthropic({}), + * retriever: new MemoryVectorStore(new OpenAIEmbeddings()).asRetriever(), + * verbose: true, + * }); + * const retrievedDocs = await retriever.getRelevantDocuments( + * "What are mitochondria made of?", + * ); + * ``` + */ export class MultiQueryRetriever extends BaseRetriever { static lc_name() { return "MultiQueryRetriever"; } diff --git a/langchain/src/retrievers/multi_vector.ts b/langchain/src/retrievers/multi_vector.ts index f438382fb3d1..6c778d9f5a8e 100644 --- a/langchain/src/retrievers/multi_vector.ts +++ b/langchain/src/retrievers/multi_vector.ts @@ -18,6 +18,19 @@ export interface MultiVectorRetrieverInput extends BaseRetrieverInput { * A retriever that retrieves documents from a vector store and a document * store. It uses the vector store to find relevant documents based on a * query, and then retrieves the full documents from the document store. 
+ * @example + * ```typescript + * const retriever = new MultiVectorRetriever({ + * vectorstore: new FaissStore(new OpenAIEmbeddings(), {}), + * docstore: new InMemoryStore(), + * idKey: "doc_id", + * childK: 20, + * parentK: 5, + * }); + * + * const retrieverResult = await retriever.getRelevantDocuments("justice breyer"); + * console.log(retrieverResult[0].pageContent.length); + * ``` */ export class MultiVectorRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/retrievers/parent_document.ts b/langchain/src/retrievers/parent_document.ts index 6199a7ca323d..3783d4bfdd27 100644 --- a/langchain/src/retrievers/parent_document.ts +++ b/langchain/src/retrievers/parent_document.ts @@ -30,6 +30,27 @@ export type ParentDocumentRetrieverFields = MultiVectorRetrieverInput & { * * This strikes a balance between better targeted retrieval with small documents * and the more context-rich larger documents. + * @example + * ```typescript + * const retriever = new ParentDocumentRetriever({ + * vectorstore: new MemoryVectorStore(new OpenAIEmbeddings()), + * docstore: new InMemoryStore(), + * parentSplitter: new RecursiveCharacterTextSplitter({ + * chunkOverlap: 0, + * chunkSize: 500, + * }), + * childSplitter: new RecursiveCharacterTextSplitter({ + * chunkOverlap: 0, + * chunkSize: 50, + * }), + * childK: 20, + * parentK: 5, + * }); + * + * const parentDocuments = await getDocuments(); + * await retriever.addDocuments(parentDocuments); + * const retrievedDocs = await retriever.getRelevantDocuments("justice breyer"); + * ``` */ export class ParentDocumentRetriever extends MultiVectorRetriever { static lc_name() { diff --git a/langchain/src/retrievers/time_weighted.ts b/langchain/src/retrievers/time_weighted.ts index 876e72a3cd7d..29c3ac51546a 100644 --- a/langchain/src/retrievers/time_weighted.ts +++ b/langchain/src/retrievers/time_weighted.ts @@ -24,6 +24,22 @@ export const BUFFER_IDX = "buffer_idx"; /** * TimeWeightedVectorStoreRetriever retrieves documents based on their time-weighted relevance. * ref: https://github.com/langchain-ai/langchain/blob/master/libs/langchain/langchain/retrievers/time_weighted_retriever.py + * @example + * ```typescript + * const retriever = new TimeWeightedVectorStoreRetriever({ + * vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()), + * memoryStream: [], + * searchKwargs: 2, + * }); + * await retriever.addDocuments([ + * { pageContent: "My name is John.", metadata: {} }, + * { pageContent: "My favourite food is pizza.", metadata: {} }, + * ]); + * const results = await retriever.getRelevantDocuments( + * "What is my favourite food?", + * ); + * ``` */ export class TimeWeightedVectorStoreRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/retrievers/vespa.ts b/langchain/src/retrievers/vespa.ts index 81d1b2e5dcf0..09b3b3e46763 100644 --- a/langchain/src/retrievers/vespa.ts +++ b/langchain/src/retrievers/vespa.ts @@ -20,6 +20,21 @@ export interface VespaRetrieverParams extends RemoteRetrieverParams { * Class responsible for retrieving data from Vespa. It extends the * `RemoteRetriever` class and includes methods for creating the JSON body * for a query and processing the JSON response from Vespa. 
+ * @example + * ```typescript + * const retriever = new VespaRetriever({ + * url: "https://doc-search.vespa.oath.cloud", + * auth: false, + * query_body: { + * yql: "select content from paragraph where userQuery()", + * hits: 5, + * ranking: "documentation", + * locale: "en-us", + * }, + * content_field: "content", + * }); + * const result = await retriever.getRelevantDocuments("what is vespa?"); + * ``` */ export class VespaRetriever extends RemoteRetriever { static lc_name() { diff --git a/langchain/src/retrievers/zep.ts b/langchain/src/retrievers/zep.ts index 3d9e10f9fb61..f87d38ca79d8 100644 --- a/langchain/src/retrievers/zep.ts +++ b/langchain/src/retrievers/zep.ts @@ -34,6 +34,16 @@ export interface ZepRetrieverConfig extends BaseRetrieverInput { /** * Class for retrieving information from a Zep long-term memory store. * Extends the BaseRetriever class. + * @example + * ```typescript + * const retriever = new ZepRetriever({ + * url: "http://localhost:8000", + * sessionId: "session_exampleUUID", + * topK: 3, + * }); + * const query = "Can I drive red cars in France?"; + * const docs = await retriever.getRelevantDocuments(query); + * ``` */ export class ZepRetriever extends BaseRetriever { static lc_name() { diff --git a/langchain/src/schema/output_parser.ts b/langchain/src/schema/output_parser.ts index 6ce3e28b0968..ac19ec81ec1c 100644 --- a/langchain/src/schema/output_parser.ts +++ b/langchain/src/schema/output_parser.ts @@ -273,6 +273,21 @@ export abstract class BaseCumulativeTransformOutputParser< /** * OutputParser that parses LLMResult into the top likely string. + * @example + * ```typescript + * const promptTemplate = PromptTemplate.fromTemplate( + * "Tell me a joke about {topic}", + * ); + * + * const chain = RunnableSequence.from([ + * promptTemplate, + * new ChatOpenAI({}), + * new StringOutputParser(), + * ]); + * + * const result = await chain.invoke({ topic: "bears" }); + * console.log(result); // e.g. "What do you call a bear with no teeth? A gummy bear!" + * ``` */ export class StringOutputParser extends BaseTransformOutputParser { static lc_name() { diff --git a/langchain/src/storage/file_system.ts b/langchain/src/storage/file_system.ts index c9e33d7bc230..05f7a83d7ef8 100644 --- a/langchain/src/storage/file_system.ts +++ b/langchain/src/storage/file_system.ts @@ -5,6 +5,27 @@ import { BaseStore } from "../schema/storage.js"; /** * File system implementation of the BaseStore using a dictionary. Used for * storing key-value pairs in the file system. + * @example + * ```typescript + * const store = await LocalFileStore.fromPath("./messages"); + * await store.mset( + * Array.from({ length: 5 }).map((_, index) => [ + * `message:id:${index}`, + * new TextEncoder().encode( + * JSON.stringify( + * index % 2 === 0 + * ? new AIMessage("ai stuff...") + * : new HumanMessage("human stuff..."), + * ), + * ), + * ]), + * ); + * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]); + * console.log(retrievedMessages.map((v) => new TextDecoder().decode(v))); + * for await (const key of store.yieldKeys("message:id:")) { + * await store.mdelete([key]); + * } + * ``` */ export class LocalFileStore extends BaseStore { lc_namespace = ["langchain", "storage"]; diff --git a/langchain/src/storage/in_memory.ts b/langchain/src/storage/in_memory.ts index 56ed52a6e462..a4b5441b454d 100644 --- a/langchain/src/storage/in_memory.ts +++ b/langchain/src/storage/in_memory.ts @@ -3,6 +3,21 @@ import { BaseStore } from "../schema/storage.js"; /** * In-memory implementation of the BaseStore using a dictionary. 
Used for * storing key-value pairs in memory. + * @example + * ```typescript + * const store = new InMemoryStore();
 + * await store.mset( + * Array.from({ length: 5 }).map((_, index) => [ + * `message:id:${index}`, + * index % 2 === 0 + * ? new AIMessage("ai stuff...") + * : new HumanMessage("human stuff..."), + * ]), + * ); + * + * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]); + * const yieldedKeys = []; + * for await (const key of store.yieldKeys("message:id:")) { + * yieldedKeys.push(key); + * } + * await store.mdelete(yieldedKeys); + * ``` */ // eslint-disable-next-line @typescript-eslint/no-explicit-any export class InMemoryStore extends BaseStore { diff --git a/langchain/src/storage/ioredis.ts b/langchain/src/storage/ioredis.ts index 72458462e9f8..d5864f59b6ea 100644 --- a/langchain/src/storage/ioredis.ts +++ b/langchain/src/storage/ioredis.ts @@ -6,6 +6,30 @@ import { BaseStore } from "../schema/storage.js"; * Class that extends the BaseStore class to interact with a Redis * database. It provides methods for getting, setting, and deleting data, * as well as yielding keys from the database. + * @example + * ```typescript + * const store = new RedisByteStore({ client: new Redis({}) }); + * await store.mset([ + * [ + * "message:id:0", + * new TextEncoder().encode(JSON.stringify(new AIMessage("ai stuff..."))), + * ], + * [ + * "message:id:1", + * new TextEncoder().encode( + * JSON.stringify(new HumanMessage("human stuff...")), + * ), + * ], + * ]); + * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]); + * console.log(retrievedMessages.map((v) => new TextDecoder().decode(v))); + * const yieldedKeys = []; + * for await (const key of store.yieldKeys("message:id:")) { + * yieldedKeys.push(key); + * } + * console.log(yieldedKeys); + * await store.mdelete(yieldedKeys); + * ``` */ export class RedisByteStore extends BaseStore { lc_namespace = ["langchain", "storage"]; diff --git a/langchain/src/storage/upstash_redis.ts b/langchain/src/storage/upstash_redis.ts index a311da01db7b..313444399661 100644 --- a/langchain/src/storage/upstash_redis.ts +++ b/langchain/src/storage/upstash_redis.ts @@ -25,6 +25,25 @@ export interface UpstashRedisStoreInput { * Class that extends the BaseStore class to interact with an Upstash Redis * database. It provides methods for getting, setting, and deleting data, * as well as yielding keys from the database. + * @example + * ```typescript + * const store = new UpstashRedisStore({ + * client: new Redis({ + * url: "your-upstash-redis-url", + * token: "your-upstash-redis-token", + * }), + * }); + * await store.mset([ + * ["message:id:0", "encoded-ai-message"], + * ["message:id:1", "encoded-human-message"], + * ]); + * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]); + * const yieldedKeys = []; + * for await (const key of store.yieldKeys("message:id")) { + * yieldedKeys.push(key); + * } + * await store.mdelete(yieldedKeys); + * ``` */ export class UpstashRedisStore extends BaseStore { lc_namespace = ["langchain", "storage"]; diff --git a/langchain/src/storage/vercel_kv.ts b/langchain/src/storage/vercel_kv.ts index 977ed7b5acbe..9b9646377713 100644 --- a/langchain/src/storage/vercel_kv.ts +++ b/langchain/src/storage/vercel_kv.ts @@ -6,6 +6,22 @@ import { BaseStore } from "../schema/storage.js"; * Class that extends the BaseStore class to interact with a Vercel KV * database. It provides methods for getting, setting, and deleting data, * as well as yielding keys from the database. 
+ * @example + * ```typescript + * const store = new VercelKVStore({ + * client: getClient(), + * }); + * await store.mset([ + * ["message:id:0", new TextEncoder().encode("encoded message 0")], + * ["message:id:1", new TextEncoder().encode("encoded message 1")], + * ]); + * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]); + * const yieldedKeys = []; + * for await (const key of store.yieldKeys("message:id:")) { + * yieldedKeys.push(key); + * } + * await store.mdelete(yieldedKeys); + * ``` */ export class VercelKVStore extends BaseStore { lc_namespace = ["langchain", "storage"]; diff --git a/langchain/src/tools/calculator.ts b/langchain/src/tools/calculator.ts index 2464edd70d97..7828b2576578 100644 --- a/langchain/src/tools/calculator.ts +++ b/langchain/src/tools/calculator.ts @@ -5,6 +5,12 @@ import { Tool } from "./base.js"; /** * The Calculator class is a tool used to evaluate mathematical * expressions. It extends the base Tool class. + * @example + * ```typescript + * const calculator = new Calculator(); + * const sum = await calculator.invoke("99 + 99"); + * console.log("The sum of 99 and 99 is:", sum); + * ``` */ export class Calculator extends Tool { static lc_name() { diff --git a/langchain/src/tools/searchapi.ts b/langchain/src/tools/searchapi.ts index eab39c77c694..fb48c34581db 100644 --- a/langchain/src/tools/searchapi.ts +++ b/langchain/src/tools/searchapi.ts @@ -34,6 +34,33 @@ export type SearchApiParameters = { * * Ensure you've set the SEARCHAPI_API_KEY environment variable for authentication. * You can obtain a free API key from https://www.searchapi.io/. + * @example + * ```typescript + * const searchApi = new SearchApi("your-api-key", { + * engine: "google_news", + * }); + * const agent = RunnableSequence.from([ + * ChatPromptTemplate.fromMessages([ + * ["ai", "Answer the following questions using a bulleted list markdown format."], + * ["human", "{input}"], + * ]), + * new ChatOpenAI({ temperature: 0 }), + * (input: BaseMessageChunk) => ({ + * log: "test", + * returnValues: { + * output: input, + * }, + * }), + * ]); + * const executor = AgentExecutor.fromAgentAndTools({ + * agent, + * tools: [searchApi], + * }); + * const res = await executor.invoke({ + * input: "What's happening in Ukraine today?", + * }); + * console.log(res); + * ``` */ export class SearchApi extends Tool { static lc_name() { diff --git a/langchain/src/tools/searxng_search.ts b/langchain/src/tools/searxng_search.ts index 71f1512b3b3b..fd01a67bc8dc 100644 --- a/langchain/src/tools/searxng_search.ts +++ b/langchain/src/tools/searxng_search.ts @@ -91,7 +91,8 @@ interface SearxngSearchParams { * The input should be a search query, and the output is a JSON array of the query results. * * note: works best with *agentType*: `structured-chat-zero-shot-react-description` - * https://github.com/searxng/searxng */ + * https://github.com/searxng/searxng + */ export class SearxngSearch extends Tool { static lc_name() { return "SearxngSearch"; } diff --git a/langchain/src/tools/wikipedia_query_run.ts b/langchain/src/tools/wikipedia_query_run.ts index 6b669849ae77..127010b46cf3 100644 --- a/langchain/src/tools/wikipedia_query_run.ts +++ b/langchain/src/tools/wikipedia_query_run.ts @@ -55,6 +55,14 @@ interface PageResult { /** * Class for interacting with and fetching data from the Wikipedia API. It * extends the Tool class. 
+ * @example + * ```typescript + * const wikipediaQuery = new WikipediaQueryRun({ + * topKResults: 3, + * maxDocContentLength: 4000, + * }); + * const result = await wikipediaQuery.call("Langchain"); + * ``` */ export class WikipediaQueryRun extends Tool { static lc_name() { diff --git a/langchain/src/tools/wolframalpha.ts b/langchain/src/tools/wolframalpha.ts index ef8d2b1350bb..b486d95e1db2 100644 --- a/langchain/src/tools/wolframalpha.ts +++ b/langchain/src/tools/wolframalpha.ts @@ -1,5 +1,14 @@ import { Tool, ToolParams } from "./base.js"; +/** + * @example + * ```typescript + * const tool = new WolframAlphaTool({ + * appid: "YOUR_APP_ID", + * }); + * const res = await tool.invoke("What is 2 * 2?"); + * ``` + */ export class WolframAlphaTool extends Tool { appid: string;