-
Notifications
You must be signed in to change notification settings - Fork 0
/
interfaces.ts
52 lines (43 loc) · 1.11 KB
/
interfaces.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
// Type-only imports: every name below is used purely in type positions
// (interface fields and the `LmTokenizer` union — `typeof mistralTokenizer`
// is a type query, which `import type` permits). Marking them `import type`
// guarantees they are erased at compile time, keeps this module emit-free
// under isolatedModules, and avoids bundling the tokenizer packages just to
// borrow their types.
import type { LmProviderType, ModelConf } from "@locallm/types";
import type { LlamaTokenizer } from "llama-tokenizer-js";
import type { Llama3Tokenizer } from "llama3-tokenizer-js";
import type mistralTokenizer from 'mistral-tokenizer-js';
//import { LmProviderType, ModelConf } from "./packages/types/interfaces";
/**
 * Snapshot of the inference API's current runtime status flags and the
 * active model configuration.
 */
interface ApiState {
  /** Whether an operation is currently running (per flag name). */
  isRunning: boolean;
  /** Whether a streaming response is currently in progress (per flag name). */
  isStreaming: boolean;
  /** Whether a model has finished loading. */
  isModelLoaded: boolean;
  /** Whether a model load is in progress. */
  isLoadingModel: boolean;
  /** Whether the loaded model supports multimodal input — NOTE(review): inferred from name, confirm against consumers. */
  isModelMultimodal: boolean;
  /** Configuration of the current model (shape defined in @locallm/types). */
  model: ModelConf;
}
/**
 * Connection settings for one language-model provider backend.
 */
interface LmBackend {
  /** Identifier/display name for this backend entry. */
  name: string;
  /** Which provider implementation this backend uses (enum from @locallm/types). */
  providerType: LmProviderType;
  /** Base URL of the inference server. */
  serverUrl: string;
  /** API key sent to the server — presumably may be empty for local servers; TODO confirm. */
  apiKey: string;
  /** Whether this backend is enabled. */
  enabled: boolean;
}
/*interface Task {
name: string;
template: string;
modelConf: ModelConf;
inferParams?: OptionalInferParams
}*/
/**
 * A named GBNF grammar (GBNF is llama.cpp's grammar format, per the type name).
 */
interface GbnfGrammar {
  /** Identifier/display name of the grammar. */
  name: string;
  /** The grammar source text. */
  code: string;
}
/** Supported output format modes — NOTE(review): exact rendering semantics are defined by consumers. */
type FormatMode = "Html" | "Text" | "Markdown" | "Json";
/** Identifiers for the selectable tabs. */
type TabType = "prompts" | "templates" | "grammars";
/** Union of the supported tokenizer implementations (class types plus the mistral default-export's type). */
type LmTokenizer = LlamaTokenizer | Llama3Tokenizer | typeof mistralTokenizer;
/** Human-readable labels naming the tokenizer families. */
type LmTokenizerType = "Llama 2" | "Llama 3" | "Mistral";
// Type-only re-export: every symbol in this module is an interface or type
// alias, so `export type` is required for correct single-file transpilation
// under isolatedModules (esbuild/swc/babel can then drop this module from
// the runtime bundle entirely). Type-level consumers are unaffected.
export type {
  FormatMode,
  ApiState,
  TabType,
  LmBackend,
  GbnfGrammar,
  LmTokenizer,
  LmTokenizerType,
}