readme.md (+13 −0)

@@ -49,6 +49,19 @@ client.createCompletion(params: CompletionParams): Promise<{
}>
```

To get a streaming response, use the `streamCompletion` method.

```ts
client.streamCompletion(params: CompletionParams): Promise<
ReadableStream<{
/** The completion string. */
completion: string;
/** The raw response from the API. */
response: CompletionResponse;
}>
>
```
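
A minimal usage sketch, mirroring the example in the method's JSDoc (the model and prompt are illustrative):

```ts
const client = new OpenAIClient(process.env.OPENAI_API_KEY);

const stream = await client.streamCompletion({
  model: 'text-davinci-003',
  prompt: 'Write a short poem about streams.',
  max_tokens: 128,
});

// Each chunk carries the newly generated text plus the raw API response.
for await (const chunk of stream) {
  process.stdout.write(chunk.completion);
}
```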

### Create Chat Completion

See: [OpenAI docs](https://beta.openai.com/docs/api-reference/chat) | [Type definitions](/src/schemas/chat-completion.ts)
src/fetch-api.ts (+1 −1)

@@ -4,7 +4,7 @@ import { OpenAIApiError } from './errors';

const DEFAULT_BASE_URL = 'https://api.openai.com/v1';

-export interface FetchOptions extends Options {
+export interface FetchOptions extends Omit<Options, 'credentials'> {
(Collaborator review comment on the change above: "Thanks for catching this")

credentials?: string;
}
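
For context on the `Omit`: when an interface extends another, any redeclared property must stay assignable to the inherited type, and ky's `Options` presumably types `credentials` as the narrow DOM `RequestCredentials` union, which conflicts with this interface's `credentials?: string`. A minimal sketch of the pattern (the types are illustrative, not ky's actual definitions):

```ts
interface Base {
  // Stand-in for ky's `credentials`, a union of string literals.
  credentials?: 'omit' | 'same-origin' | 'include';
}

// Error TS2430: `string` is not assignable to the inherited literal union.
// interface Widened extends Base { credentials?: string; }

// Removing the property first allows redeclaring it with a different type.
interface Works extends Omit<Base, 'credentials'> {
  credentials?: string;
}
```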

src/openai-client.ts (+71 −3)

@@ -12,10 +12,10 @@ import type { FetchOptions } from './fetch-api';
import type {
ChatCompletionParams,
ChatCompletionResponse,
-ChatResponseMessage} from './schemas/chat-completion';
-import {
-ChatCompletionParamsSchema
+ChatResponseMessage,
} from './schemas/chat-completion';
+import { ChatCompletionParamsSchema } from './schemas/chat-completion';
+import { StreamCompletionChunker } from './streaming';

export type ConfigOpts = {
/**
@@ -91,6 +91,49 @@ export class OpenAIClient {
return { completion, response };
}

/**
* Create a completion for a single prompt string and stream back partial progress.
* @param params Standard OpenAI completion parameters.
* @returns A stream of completion chunks.
*
* @example
*
* ```ts
* const client = new OpenAIClient(process.env.OPENAI_API_KEY);
* const stream = await client.streamCompletion({
* model: "text-davinci-003",
* prompt: "Give me some lyrics, make it up.",
* max_tokens: 256,
* temperature: 0,
* });
*
* for await (const chunk of stream) {
* process.stdout.write(chunk.completion);
* }
* ```
*/
async streamCompletion(params: CompletionParams): Promise<
ReadableStream<{
/** The completion string. */
completion: string;
/** The raw response from the API. */
response: CompletionResponse;
}>
> {
const reqBody = CompletionParamsSchema.parse(params);
const response = await this.api.post('completions', {
json: { ...reqBody, stream: true },
onDownloadProgress: () => {}, // Trick to make ky return the response body as a ReadableStream.
});
const stream = response.body as ReadableStream;
return stream.pipeThrough(
new StreamCompletionChunker((response: CompletionResponse) => {
const completion = response.choices[0].text || '';
return { completion, response };
})
);
}

/**
* Create a completion for a chat message.
*/
@@ -111,6 +154,31 @@ export class OpenAIClient {
return { message, response };
}

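/**
* Create a completion for a chat conversation and stream back partial progress.
* @param params Standard OpenAI chat completion parameters.
* @returns A stream of chat message chunks.
*
* @example
*
* ```ts
* // Model and messages are illustrative.
* const client = new OpenAIClient(process.env.OPENAI_API_KEY);
* const stream = await client.streamChatCompletion({
*   model: 'gpt-3.5-turbo',
*   messages: [{ role: 'user', content: 'Tell me a story.' }],
* });
*
* for await (const chunk of stream) {
*   process.stdout.write(chunk.message.content ?? '');
* }
* ```
*/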
async streamChatCompletion(params: ChatCompletionParams): Promise<
ReadableStream<{
/** The completion message. */
message: ChatResponseMessage;
/** The raw response from the API. */
response: ChatCompletionResponse;
}>
> {
const reqBody = ChatCompletionParamsSchema.parse(params);
const response = await this.api.post('chat/completions', {
json: { ...reqBody, stream: true },
onDownloadProgress: () => {}, // Trick to make ky return the response body as a ReadableStream.
});
const stream = response.body as ReadableStream;
return stream.pipeThrough(
new StreamCompletionChunker((response: ChatCompletionResponse) => {
const message = response.choices[0].delta || {
role: 'assistant',
content: '',
};
return { message, response };
})
);
}

/**
* Create an edit for a single input string.
*/
src/schemas/chat-completion.ts (+2 −0)

@@ -76,4 +76,6 @@ export type ChatCompletionResponseChoices = {
index?: number;
finish_reason?: string;
message?: ChatResponseMessage;
/** Used instead of `message` when streaming */
delta?: ChatResponseMessage;
}[];
src/streaming.ts (new file, +71 −0)

@@ -0,0 +1,71 @@
/**
* A function that converts a raw response from the OpenAI API into a nicer
* object that includes the first choice from the response.
*/
type ResponseFactory<Raw, Nice> = (response: Raw) => Nice;
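
// The OpenAI API streams server-sent events: one JSON payload per `data:`
// line, terminated by a `[DONE]` sentinel. Roughly (payloads illustrative):
//
//   data: {"id":"cmpl-abc","choices":[{"text":"Hel","index":0}]}
//   data: {"id":"cmpl-abc","choices":[{"text":"lo","index":0}]}
//   data: [DONE]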

/**
* A parser for the streaming responses from the OpenAI API.
*
* Conveniently shaped like the underlying sink argument to the WritableStream constructor.
*/
class OpenAIStreamParser<Raw, Nice> {
private responseFactory: ResponseFactory<Raw, Nice>;
private decoder = new TextDecoder();
private buffer = '';
private done = false;
onchunk?: (chunk: Nice) => void;
onend?: () => void;

constructor(responseFactory: ResponseFactory<Raw, Nice>) {
this.responseFactory = responseFactory;
}

/**
* Takes the ReadableStream chunks produced by `fetch` and turns them into
* response objects via the supplied response factory.
* @param chunk The chunk of data from the stream.
*/
write(chunk: Uint8Array): void {
// Decode with `stream: true` so multi-byte characters split across chunks
// survive, and hold any trailing partial line in the buffer until the
// rest of it arrives in a later chunk.
this.buffer += this.decoder.decode(chunk, { stream: true });
const lines = this.buffer.split('\n');
this.buffer = lines.pop() ?? '';
lines
.map((line) => line.trim())
.filter((line) => line.length > 0)
.forEach((line) => {
const pos = line.indexOf(':');
const name = line.substring(0, pos);
if (name !== 'data') return;
const content = line.substring(pos + 1).trim();
if (content.length === 0) return;
if (content === '[DONE]') {
this.done = true;
this.onend?.();
return;
}
try {
const parsed = JSON.parse(content);
this.onchunk?.(this.responseFactory(parsed));
} catch (e) {
console.error('Failed parsing streamed JSON chunk', e);
}
});
}

/**
* Called when the writable side closes; ends the readable side if the
* stream finished without a `[DONE]` event.
*/
close(): void {
if (!this.done) this.onend?.();
}
}

/**
* A transform stream that takes the streaming responses from the OpenAI API
* and turns them into useful response objects.
*/
export class StreamCompletionChunker<Raw, Nice>
implements TransformStream<Uint8Array, Nice>
{
writable: WritableStream<Uint8Array>;
readable: ReadableStream<Nice>;

constructor(responseFactory: ResponseFactory<Raw, Nice>) {
const parser = new OpenAIStreamParser(responseFactory);
this.writable = new WritableStream(parser);
this.readable = new ReadableStream({
start(controller) {
parser.onchunk = (chunk: Nice) => controller.enqueue(chunk);
parser.onend = () => controller.close();
},
});
}
}
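
A quick sketch of the chunker in isolation, feeding it hand-encoded SSE bytes in place of a real `fetch` body (the payload shape and values are illustrative):

```ts
import { StreamCompletionChunker } from './streaming';

type RawResponse = { choices: { text?: string }[] };

const encoder = new TextEncoder();

// A fake byte stream standing in for `response.body`.
const bytes = new ReadableStream<Uint8Array>({
  start(controller) {
    for (const line of [
      'data: {"choices":[{"text":"Hello"}]}\n',
      'data: {"choices":[{"text":" world"}]}\n',
      'data: [DONE]\n',
    ]) {
      controller.enqueue(encoder.encode(line));
    }
    controller.close();
  },
});

const chunks = bytes.pipeThrough(
  new StreamCompletionChunker((raw: RawResponse) => ({
    completion: raw.choices[0].text ?? '',
    response: raw,
  }))
);

// Prints "Hello world" incrementally. Assumes a runtime where
// ReadableStream is async-iterable (e.g. recent Node).
for await (const chunk of chunks) {
  process.stdout.write(chunk.completion);
}
```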
tsconfig.json (+1 −1)

@@ -10,7 +10,7 @@
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"isolatedModules": true,
"lib": ["es2021"],
"lib": ["es2021", "DOM"],
"module": "commonjs",
"moduleResolution": "node",
"outDir": "dist",