-
Notifications
You must be signed in to change notification settings - Fork 2k
/
eval_chain.ts
64 lines (57 loc) · 1.8 KB
/
eval_chain.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import type { BaseLanguageModelInterface } from "@langchain/core/language_models/base";
import { ChainValues } from "@langchain/core/utils/types";
import { PromptTemplate } from "@langchain/core/prompts";
import { QA_PROMPT } from "./prompt.js";
import { LLMChain, LLMChainInput } from "../../chains/llm_chain.js";
/**
 * Lookup keys used when evaluating QA examples: where to find the
 * question and reference answer in each example, and the predicted
 * answer in each prediction.
 */
export interface EvaluateArgs {
  /** Key under which each example stores the question. */
  questionKey: string;
  /** Key under which each example stores the reference answer. */
  answerKey: string;
  /** Key under which each prediction stores the model's predicted answer. */
  predictionKey: string;
}
// Returns true when both sets contain exactly the same elements.
const eqSet = (xs: Set<string>, ys: Set<string>) => {
  // Different cardinality can never be equal; check it first.
  if (xs.size !== ys.size) {
    return false;
  }
  // Same size, so xs ⊆ ys implies equality.
  for (const element of xs) {
    if (!ys.has(element)) {
      return false;
    }
  }
  return true;
};
/**
 * Chain that grades question-answering predictions by prompting an LLM
 * with the question, the reference answer, and the predicted answer.
 */
export class QAEvalChain extends LLMChain {
  static lc_name() {
    return "QAEvalChain";
  }

  /**
   * Builds a QAEvalChain around the given language model.
   *
   * @param llm - Language model used to grade predictions.
   * @param options - Optional prompt override and additional chain inputs.
   * @returns A new QAEvalChain.
   * @throws Error if the prompt's input variables are not exactly
   *   `query`, `answer`, and `result`.
   */
  static fromLlm(
    llm: BaseLanguageModelInterface,
    options: {
      prompt?: PromptTemplate;
      chainInput?: Omit<LLMChainInput, "llm">;
    } = {}
  ): QAEvalChain {
    const prompt = options.prompt || QA_PROMPT;
    const expectedInputVars: Set<string> = new Set([
      "query",
      "answer",
      "result",
    ]);
    // Compare as sets so the declaration order of inputVariables is irrelevant.
    const inputVarsSet: Set<string> = new Set(prompt.inputVariables);
    if (!eqSet(expectedInputVars, inputVarsSet)) {
      throw new Error(
        `Input variables should be ${[...expectedInputVars]}, but got ${
          prompt.inputVariables
        }`
      );
    }
    return new this({ llm, prompt, ...options.chainInput });
  }

  /**
   * Grades each prediction against its corresponding example.
   *
   * @param examples - Array of examples; each holds the question under
   *   `args.questionKey` and the reference answer under `args.answerKey`.
   * @param predictions - Array parallel to `examples`; each holds the
   *   predicted answer under `args.predictionKey`.
   * @param args - Keys used to read values from examples and predictions.
   * @returns The chain outputs for every example/prediction pair.
   * @throws Error if `examples` and `predictions` differ in length.
   */
  async evaluate(
    examples: ChainValues,
    predictions: ChainValues,
    args: EvaluateArgs = {
      questionKey: "query",
      answerKey: "answer",
      predictionKey: "result",
    }
  ): Promise<ChainValues> {
    // Fail fast with a clear message instead of the opaque TypeError that
    // `predictions[i][...]` would raise when the arrays are misaligned.
    if (examples.length !== predictions.length) {
      throw new Error(
        `Number of examples (${examples.length}) must match number of predictions (${predictions.length}).`
      );
    }
    const inputs = examples.map((example: ChainValues, i: number) => ({
      query: example[args.questionKey],
      answer: example[args.answerKey],
      result: predictions[i][args.predictionKey],
    }));
    return await this.apply(inputs);
  }
}