-
Notifications
You must be signed in to change notification settings - Fork 21
/
openai-client.ts
135 lines (124 loc) · 4.11 KB
/
openai-client.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
import { type OpenAI } from 'openai';
import { createApiInstance, type KyOptions } from './fetch-api.js';
import { StreamCompletionChunker } from './streaming.js';
import {
type ChatParams,
type ChatResponse,
type ChatStreamParams,
type ChatStreamResponse,
type CompletionParams,
type CompletionResponse,
type CompletionStreamParams,
type CompletionStreamResponse,
type EmbeddingParams,
type EmbeddingResponse,
} from './types.js';
/**
 * Configuration options for constructing an `OpenAIClient`.
 * All fields are optional; `apiKey` may instead come from the
 * OPENAI_API_KEY environment variable (see the constructor).
 */
export type ConfigOpts = {
  /**
   * The API key used to authenticate with the OpenAI API.
   * @see https://platform.openai.com/account/api-keys
   */
  apiKey?: string;
  /**
   * The organization ID that should be billed for API requests.
   * This is only necessary if your API key is scoped to multiple organizations.
   * @see https://platform.openai.com/docs/api-reference/organization-optional
   */
  organizationId?: string;
  /**
   * The HTTP endpoint for the OpenAI API. You probably don't want to change this.
   * @default https://api.openai.com/v1
   */
  baseUrl?: string;
  /**
   * Options to pass to the underlying fetch library (Ky).
   * @see https://github.com/sindresorhus/ky/tree/main#options
   */
  kyOptions?: KyOptions;
};
/** Override the default Ky options for a single request. */
type RequestOpts = {
  /** Extra or replacement HTTP headers for this request only. */
  headers?: KyOptions['headers'];
  /** AbortSignal used to cancel this request. */
  signal?: AbortSignal;
};
/**
 * Thin client for the OpenAI REST API, backed by a Ky fetch instance.
 * Supports chat/text completions (blocking and streaming) and embeddings.
 */
export class OpenAIClient {
  private api: ReturnType<typeof createApiInstance>;

  /**
   * @param opts - Client configuration; `apiKey` and `organizationId` fall
   *   back to the OPENAI_API_KEY / OPENAI_ORG_ID environment variables.
   * @throws Error if no API key is provided via `opts` or the environment.
   */
  constructor(opts: ConfigOpts = {}) {
    // Fall back to a stub so env lookups are safe in runtimes without `process`.
    const process = globalThis.process || { env: {} };
    const apiKey = opts.apiKey || process.env.OPENAI_API_KEY;
    const organizationId = opts.organizationId || process.env.OPENAI_ORG_ID;
    if (!apiKey)
      throw new Error(
        'Missing OpenAI API key. Please provide one in the config or set the OPENAI_API_KEY environment variable.'
      );
    this.api = createApiInstance({
      apiKey,
      baseUrl: opts.baseUrl,
      organizationId,
      kyOptions: opts.kyOptions,
    });
  }

  /** Returns the base Ky instance, extended with per-request overrides if given. */
  private getApi(opts?: RequestOpts) {
    return opts ? this.api.extend(opts) : this.api;
  }

  /**
   * POSTs `params` to `path` with `stream: true` and returns the raw
   * response body as a ReadableStream. Shared by both streaming endpoints.
   * @throws Error if the response unexpectedly has no body to stream.
   */
  private async streamResponseBody(
    path: string,
    params: object,
    opts?: RequestOpts
  ): Promise<ReadableStream> {
    const response = await this.getApi(opts).post(path, {
      json: { ...params, stream: true },
      onDownloadProgress: () => {}, // trick ky to return ReadableStream.
    });
    // Fetch's Response.body is nullable; fail fast with a clear message
    // instead of letting pipeThrough() throw an opaque TypeError later.
    if (!response.body) {
      throw new Error(`Expected a streaming response body from ${path}`);
    }
    return response.body as ReadableStream;
  }

  /** Create a completion for a chat message. */
  async createChatCompletion(
    params: ChatParams,
    opts?: RequestOpts
  ): Promise<ChatResponse> {
    const response: OpenAI.ChatCompletion = await this.getApi(opts)
      .post('chat/completions', { json: params })
      .json();
    return response;
  }

  /** Create a chat completion and stream back partial progress. */
  async streamChatCompletion(
    params: ChatStreamParams,
    opts?: RequestOpts
  ): Promise<ChatStreamResponse> {
    const stream = await this.streamResponseBody(
      'chat/completions',
      params,
      opts
    );
    return stream.pipeThrough(
      new StreamCompletionChunker(
        (response: OpenAI.ChatCompletionChunk) => response
      )
    );
  }

  /** Create completions for an array of prompt strings. */
  async createCompletions(
    params: CompletionParams,
    opts?: RequestOpts
  ): Promise<CompletionResponse> {
    const response: OpenAI.Completion = await this.getApi(opts)
      .post('completions', { json: params })
      .json();
    return response;
  }

  /** Create a completion for a single prompt string and stream back partial progress. */
  async streamCompletion(
    params: CompletionStreamParams,
    opts?: RequestOpts
  ): Promise<CompletionStreamResponse> {
    const stream = await this.streamResponseBody('completions', params, opts);
    return stream.pipeThrough(
      new StreamCompletionChunker((response: OpenAI.Completion) => response)
    );
  }

  /** Create an embedding vector representing the input text. */
  async createEmbeddings(
    params: EmbeddingParams,
    opts?: RequestOpts
  ): Promise<EmbeddingResponse> {
    const response: OpenAI.CreateEmbeddingResponse = await this.getApi(opts)
      .post('embeddings', { json: params })
      .json();
    return response;
  }
}