-
Notifications
You must be signed in to change notification settings - Fork 2.3k
/
Copy pathfake.ts
123 lines (103 loc) Β· 2.57 KB
/
fake.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import { BaseChatModel, BaseChatModelParams } from "./base.js";
import {
AIMessage,
AIMessageChunk,
BaseMessage,
ChatGenerationChunk,
ChatResult,
} from "../schema/index.js";
import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
/**
 * Input parameters for {@link FakeListChatModel}.
 *
 * Extends `BaseChatModelParams`, so the usual base-model options
 * (callbacks, tags, verbose, …) may also be supplied.
 */
export interface FakeChatInput extends BaseChatModelParams {
  /**
   * Responses the model returns, one per call, in order. After the last
   * entry the model wraps around to the first one again.
   */
  responses: string[];
  /**
   * Optional delay in milliseconds applied before each response (and
   * before each streamed chunk), to simulate model latency.
   */
  sleep?: number;
}
/**
 * A fake chat model that returns a predefined list of responses, cycling
 * back to the first response once the list is exhausted. Intended for
 * testing purposes.
 *
 * When streaming, the current response is yielded one character at a time.
 * If `stop` words are provided on a call, the first stop word is echoed
 * back instead of consuming a response from the list.
 */
export class FakeListChatModel extends BaseChatModel {
  static lc_name() {
    return "FakeListChatModel";
  }

  /** Responses returned in order by successive calls. */
  responses: string[];

  /** Index of the next response to return. */
  i = 0;

  /** Optional delay (ms) applied before each response / streamed chunk. */
  sleep?: number;

  constructor(fields: FakeChatInput) {
    // Forward the whole fields object so inherited BaseChatModelParams
    // options (callbacks, tags, verbose, …) are not silently dropped.
    // The original implementation called `super({})`, discarding them.
    super(fields);
    this.responses = fields.responses;
    this.sleep = fields.sleep;
  }

  _combineLLMOutput() {
    return [];
  }

  _llmType(): string {
    return "fake-list";
  }

  /**
   * Returns the next canned response (or the first stop word, if any),
   * after an optional simulated delay.
   *
   * NOTE(review): when `stop` is supplied the response index is NOT
   * advanced — this mirrors the original behavior; confirm it is intended.
   */
  async _generate(
    _messages: BaseMessage[],
    options?: this["ParsedCallOptions"]
  ): Promise<ChatResult> {
    await this._sleepIfRequested();
    if (options?.stop?.length) {
      return {
        generations: [this._formatGeneration(options.stop[0])],
      };
    } else {
      const response = this._currentResponse();
      this._incrementResponse();
      return {
        generations: [this._formatGeneration(response)],
        llmOutput: {},
      };
    }
  }

  /** Wraps raw text in the ChatGeneration shape expected by callers. */
  _formatGeneration(text: string) {
    return {
      message: new AIMessage(text),
      text,
    };
  }

  /**
   * Streams the next canned response one character at a time, sleeping
   * before each chunk when a `sleep` delay was configured.
   */
  async *_streamResponseChunks(
    _messages: BaseMessage[],
    _options: this["ParsedCallOptions"],
    _runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<ChatGenerationChunk> {
    const response = this._currentResponse();
    this._incrementResponse();
    // Strings are iterable, so this yields one character per chunk.
    for await (const text of response) {
      await this._sleepIfRequested();
      yield this._createResponseChunk(text);
    }
  }

  /** Awaits the configured delay, or resolves immediately if none is set. */
  async _sleepIfRequested() {
    if (this.sleep !== undefined) {
      await this._sleep();
    }
  }

  /** Resolves after `this.sleep` milliseconds. */
  async _sleep() {
    return new Promise<void>((resolve) => {
      setTimeout(() => resolve(), this.sleep);
    });
  }

  /** Wraps a text fragment in a streamable ChatGenerationChunk. */
  _createResponseChunk(text: string): ChatGenerationChunk {
    return new ChatGenerationChunk({
      message: new AIMessageChunk({ content: text }),
      text,
    });
  }

  /**
   * The response at the current index.
   * NOTE(review): an empty `responses` array yields `undefined` here —
   * callers are assumed to always supply at least one response.
   */
  _currentResponse() {
    return this.responses[this.i];
  }

  /** Advances the response index, wrapping back to 0 at the end. */
  _incrementResponse() {
    if (this.i < this.responses.length - 1) {
      this.i += 1;
    } else {
      this.i = 0;
    }
  }
}