-
Notifications
You must be signed in to change notification settings - Fork 2.1k
/
conditional.ts
102 lines (93 loc) · 3.05 KB
/
conditional.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
import type { BaseChatModel } from "../language_models/chat_models.js";
import type { BasePromptTemplate } from "../prompts/base.js";
import type { BaseLanguageModelInterface } from "../language_models/base.js";
import type { BaseLLM } from "../language_models/llms.js";
import type { PartialValues } from "../utils/types.js";
/**
 * Options accepted by `BasePromptSelector.getPromptAsync`.
 */
export type BaseGetPromptAsyncOptions = {
  // Values applied to the selected template via `prompt.partial(...)`
  // before it is returned; omitted means no partial variables.
  partialVariables?: PartialValues;
};
/**
 * Abstract base describing how a prompt template is chosen for a given
 * language model. Subclasses implement the selection logic in `getPrompt`.
 */
export abstract class BasePromptSelector {
  /**
   * Selects the prompt template appropriate for the given language model.
   * @param llm The language model for which to get a prompt.
   * @returns A prompt template.
   */
  abstract getPrompt(llm: BaseLanguageModelInterface): BasePromptTemplate;

  /**
   * Asynchronous variant of `getPrompt` that additionally applies partial
   * variables to the selected template before returning it.
   * @param llm The language model for which to get a prompt.
   * @param options Optional object carrying partial variables.
   * @returns A Promise resolving to the (partially filled) prompt template.
   */
  async getPromptAsync(
    llm: BaseLanguageModelInterface,
    options?: BaseGetPromptAsyncOptions
  ): Promise<BasePromptTemplate> {
    // Default to an empty set of partial variables when none were provided.
    const partialVariables = options?.partialVariables ?? {};
    return this.getPrompt(llm).partial(partialVariables);
  }
}
/**
 * `BasePromptSelector` implementation that picks a prompt from an ordered
 * list of condition/prompt pairs, falling back to a default prompt when no
 * condition matches the given language model.
 */
export class ConditionalPromptSelector extends BasePromptSelector {
  defaultPrompt: BasePromptTemplate;

  conditionals: Array<
    [
      condition: (llm: BaseLanguageModelInterface) => boolean,
      prompt: BasePromptTemplate
    ]
  >;

  constructor(
    default_prompt: BasePromptTemplate,
    conditionals: Array<
      [
        condition: (llm: BaseLanguageModelInterface) => boolean,
        prompt: BasePromptTemplate
      ]
    > = []
  ) {
    super();
    this.defaultPrompt = default_prompt;
    this.conditionals = conditionals;
  }

  /**
   * Walks the condition/prompt pairs in order and returns the prompt paired
   * with the first condition the model satisfies; otherwise the default.
   * @param llm The language model for which to get a prompt.
   * @returns A prompt template.
   */
  getPrompt(llm: BaseLanguageModelInterface): BasePromptTemplate {
    // First matching conditional wins; array order is significant.
    const matched = this.conditionals.find(([condition]) => condition(llm));
    return matched === undefined ? this.defaultPrompt : matched[1];
  }
}
/**
 * Type guard reporting whether the given language model identifies itself
 * as a plain LLM via its `_modelType()` discriminator.
 */
export function isLLM(llm: BaseLanguageModelInterface): llm is BaseLLM {
  const modelType = llm._modelType();
  return modelType === "base_llm";
}
/**
 * Type guard reporting whether the given language model identifies itself
 * as a chat model via its `_modelType()` discriminator.
 */
export function isChatModel(
  llm: BaseLanguageModelInterface
): llm is BaseChatModel {
  const modelType = llm._modelType();
  return modelType === "base_chat_model";
}