ollama_functions.ts
import { AIMessage, BaseMessage } from "@langchain/core/messages";
import { ChatResult } from "@langchain/core/outputs";
import {
BaseChatModel,
BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import { SystemMessagePromptTemplate } from "@langchain/core/prompts";
import { BaseFunctionCallOptions } from "@langchain/core/language_models/base";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { ChatOllama } from "@langchain/community/chat_models/ollama";
import { type ChatOllamaInput } from "../../chat_models/ollama.js";
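
// System prompt prepended to every call. It lists the available tools as JSON
// and instructs the model to answer with a single JSON object that names the
// selected tool and its arguments.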
const DEFAULT_TOOL_SYSTEM_TEMPLATE = `You have access to the following tools:
{tools}
You must always select one of the above tools and respond with only a JSON object matching the following schema:
{{
"tool": <name of the selected tool>,
"tool_input": <parameters for the selected tool, matching the tool's JSON schema>
}}`;

export interface ChatOllamaFunctionsCallOptions
  extends BaseFunctionCallOptions {}

export type OllamaFunctionsInput = Partial<ChatOllamaInput> &
BaseChatModelParams & {
llm?: ChatOllama;
toolSystemPromptTemplate?: string;
};
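
/**
 * Experimental wrapper that emulates OpenAI-style function calling on top of
 * ChatOllama by prompting the model to reply in JSON and parsing the result
 * into `additional_kwargs.function_call`.
 *
 * A minimal usage sketch (the model name "mistral" and the weather function
 * schema are illustrative assumptions, not part of this module):
 *
 * @example
 * ```typescript
 * import { HumanMessage } from "@langchain/core/messages";
 *
 * const model = new OllamaFunctions({ model: "mistral" }).bind({
 *   functions: [
 *     {
 *       name: "get_current_weather",
 *       description: "Get the current weather in a given location",
 *       parameters: {
 *         type: "object",
 *         properties: {
 *           location: { type: "string", description: "City and state" },
 *         },
 *         required: ["location"],
 *       },
 *     },
 *   ],
 * });
 * const response = await model.invoke([
 *   new HumanMessage("What is the weather like in Boston?"),
 * ]);
 * // response.additional_kwargs.function_call holds the parsed call.
 * ```
 */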
export class OllamaFunctions extends BaseChatModel<ChatOllamaFunctionsCallOptions> {
  llm: ChatOllama;

  toolSystemPromptTemplate: string = DEFAULT_TOOL_SYSTEM_TEMPLATE;
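
  // Fallback "function" used when the caller supplies no functions: it lets
  // the model answer conversationally while still conforming to the JSON
  // tool-call contract demanded by the system prompt.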
protected defaultResponseFunction = {
name: "__conversational_response",
description:
"Respond conversationally if no other tools should be called for a given query.",
parameters: {
type: "object",
properties: {
response: {
type: "string",
description: "Conversational response to the user.",
},
},
required: ["response"],
},
  };

  lc_namespace = ["langchain", "experimental", "chat_models"];

static lc_name(): string {
return "OllamaFunctions";
  }

constructor(fields?: OllamaFunctionsInput) {
super(fields ?? {});
this.llm = fields?.llm ?? new ChatOllama({ ...fields, format: "json" });
this.toolSystemPromptTemplate =
fields?.toolSystemPromptTemplate ?? this.toolSystemPromptTemplate;
  }

invocationParams() {
return this.llm.invocationParams();
  }

/** @ignore */
_identifyingParams() {
return this.llm._identifyingParams();
}
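
  // Core flow: choose the candidate functions, prepend the rendered tool
  // prompt, run the wrapped ChatOllama (JSON mode by default), and parse the
  // reply into either a plain AIMessage or an OpenAI-style function_call.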
async _generate(
messages: BaseMessage[],
options: this["ParsedCallOptions"],
runManager?: CallbackManagerForLLMRun | undefined
): Promise<ChatResult> {
let functions = options.functions ?? [];
if (options.function_call !== undefined) {
functions = functions.filter(
(fn) => fn.name === options.function_call?.name
);
if (!functions.length) {
throw new Error(
`If "function_call" is specified, you must also pass a matching function in "functions".`
);
}
} else if (functions.length === 0) {
functions.push(this.defaultResponseFunction);
}
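
    // Render the tool schemas into the system prompt so the model knows which
    // tools it may call and what JSON shape to produce.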
const systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(
this.toolSystemPromptTemplate
);
const systemMessage = await systemPromptTemplate.format({
tools: JSON.stringify(functions, null, 2),
});
const chatResult = await this.llm._generate(
[systemMessage, ...messages],
options,
runManager
);
const chatGenerationContent = chatResult.generations[0].message.content;
if (typeof chatGenerationContent !== "string") {
throw new Error("OllamaFunctions does not support non-string output.");
}
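
    // this.llm is constructed with format: "json" by default, so the content
    // should be a single parseable JSON object; anything else is an error.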
let parsedChatResult;
try {
parsedChatResult = JSON.parse(chatGenerationContent);
} catch (e) {
throw new Error(
`"${this.llm.model}" did not respond with valid JSON. Please try again.`
);
}
const calledToolName = parsedChatResult.tool;
const calledToolArguments = parsedChatResult.tool_input;
const calledTool = functions.find((fn) => fn.name === calledToolName);
if (calledTool === undefined) {
throw new Error(
`Failed to parse a function call from ${this.llm.model} output: ${chatGenerationContent}`
);
}
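    // The model picked the conversational fallback: unwrap its "response"
    // argument and return it as an ordinary chat generation.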
if (calledTool.name === this.defaultResponseFunction.name) {
return {
generations: [
{
message: new AIMessage({
content: calledToolArguments.response,
}),
text: calledToolArguments.response,
},
],
};
}
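    // Otherwise expose the call in OpenAI-compatible form under
    // additional_kwargs.function_call, with stringified arguments.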
const responseMessageWithFunctions = new AIMessage({
content: "",
additional_kwargs: {
function_call: {
name: calledToolName,
arguments: calledToolArguments
? JSON.stringify(calledToolArguments)
: "",
},
},
});
return {
generations: [{ message: responseMessageWithFunctions, text: "" }],
};
  }

_llmType(): string {
return "ollama_functions";
  }

/** @ignore */
_combineLLMOutput() {
return [];
}
}