-
Notifications
You must be signed in to change notification settings - Fork 335
/
types.ts
110 lines (93 loc) · 2.27 KB
/
types.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import { Tokenizers } from "../GlobalsHelper";
import { Event } from "../callbacks/CallbackManager";
/**
 * Unified language model interface.
 *
 * Implementations expose a chat API (message list in/out) and a plain
 * completion API (single prompt). Each has streaming and non-streaming
 * overloads, selected by the `stream` flag on the params object.
 */
export interface LLM {
  /** Static description of the underlying model (name, context window, sampling defaults). */
  metadata: LLMMetadata;
  /**
   * Get a chat response from the LLM.
   *
   * @param params - Messages plus options. `stream: true` selects the
   *   streaming overload, which resolves to an async iterable of
   *   {@link ChatResponseChunk}; otherwise a single {@link ChatResponse}.
   */
  chat(
    params: LLMChatParamsStreaming,
  ): Promise<AsyncIterable<ChatResponseChunk>>;
  chat(params: LLMChatParamsNonStreaming): Promise<ChatResponse>;
  /**
   * Get a prompt completion from the LLM.
   *
   * @param params - Prompt plus options; `stream: true` selects the
   *   streaming overload.
   */
  complete(
    params: LLMCompletionParamsStreaming,
  ): Promise<AsyncIterable<CompletionResponse>>;
  complete(
    params: LLMCompletionParamsNonStreaming,
  ): Promise<CompletionResponse>;
  /**
   * Calculates the number of tokens needed for the given chat messages.
   */
  tokens(messages: ChatMessage[]): number;
}
/**
 * Role of a chat message.
 *
 * NOTE(review): the semantics of "generic", "function", and "memory" are
 * not visible from this file — they depend on the consuming LLM
 * implementation; confirm against callers before relying on them.
 */
export type MessageType =
  | "user"
  | "assistant"
  | "system"
  | "generic"
  | "function"
  | "memory";
/** A single message in a chat conversation. */
export interface ChatMessage {
  // TODO: use MessageContent
  // NOTE(review): left as `any` pending the TODO above — callers presumably
  // pass a string or multi-modal MessageContent; confirm before tightening,
  // as narrowing this type is a breaking change for producers.
  content: any;
  /** Who authored this message. */
  role: MessageType;
}
/** Non-streaming result of {@link LLM.chat}. */
export interface ChatResponse {
  /** The assistant's reply message. */
  message: ChatMessage;
  /** Raw provider payload, when the implementation chooses to expose it. */
  raw?: Record<string, any>;
}
/** One incremental piece of a streaming chat response. */
export interface ChatResponseChunk {
  /** Text produced by this chunk, to be appended to previously received deltas. */
  delta: string;
}
/** Result of {@link LLM.complete} (one full response, or one streamed piece). */
export interface CompletionResponse {
  /** The completion text. */
  text: string;
  /** Raw provider payload, when the implementation chooses to expose it. */
  raw?: Record<string, any>;
}
/** Static configuration and capabilities of an LLM. */
export interface LLMMetadata {
  /** Model identifier (provider-specific name string). */
  model: string;
  /** Sampling temperature. */
  temperature: number;
  /** Nucleus-sampling probability mass. */
  topP: number;
  /** Cap on generated tokens, if the implementation sets one. */
  maxTokens?: number;
  /** Maximum number of tokens the model can attend to. */
  contextWindow: number;
  // Deliberately `Tokenizers | undefined` rather than optional: implementers
  // must state explicitly when no tokenizer applies.
  tokenizer: Tokenizers | undefined;
}
/** Options shared by streaming and non-streaming `chat` calls. */
export interface LLMChatParamsBase {
  /** Conversation history, oldest first by convention — TODO confirm ordering with implementations. */
  messages: ChatMessage[];
  /** Callback-manager event to attribute this call to, if any. */
  parentEvent?: Event;
  // NOTE(review): presumably forwarded verbatim to the underlying provider;
  // verify against individual LLM implementations.
  extraParams?: Record<string, any>;
}
/** Chat params that select the streaming `chat` overload (`stream` must be literally `true`). */
export interface LLMChatParamsStreaming extends LLMChatParamsBase {
  stream: true;
}
/** Chat params that select the non-streaming `chat` overload (`stream` omitted, `false`, or `null`). */
export interface LLMChatParamsNonStreaming extends LLMChatParamsBase {
  stream?: false | null;
}
/** Options shared by streaming and non-streaming `complete` calls. */
export interface LLMCompletionParamsBase {
  // NOTE(review): `any` mirrors ChatMessage.content — likely a string or
  // MessageContent; confirm before tightening (narrowing is caller-breaking).
  prompt: any;
  /** Callback-manager event to attribute this call to, if any. */
  parentEvent?: Event;
}
/** Completion params that select the streaming `complete` overload (`stream` must be literally `true`). */
export interface LLMCompletionParamsStreaming extends LLMCompletionParamsBase {
  stream: true;
}
/** Completion params that select the non-streaming `complete` overload (`stream` omitted, `false`, or `null`). */
export interface LLMCompletionParamsNonStreaming
  extends LLMCompletionParamsBase {
  stream?: false | null;
}
/**
 * One part of a multi-modal message: either a text segment or an image
 * reference, distinguished by `type`.
 *
 * NOTE(review): `text` and `image_url` are independently optional and not
 * tied to `type`, so invalid combinations (e.g. `type: "text"` with only
 * `image_url`) are representable. A discriminated union would be stricter,
 * but tightening it now would break existing constructors.
 */
export interface MessageContentDetail {
  type: "text" | "image_url";
  /** Present when `type` is "text". */
  text?: string;
  /** Present when `type` is "image_url". */
  image_url?: { url: string };
}
/**
 * Extended type for the content of a message that allows for multi-modal
 * messages: either a plain string or a list of text/image parts.
 */
export type MessageContent = string | MessageContentDetail[];