diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index aefe28544..bca107b6c 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "4.41.1"
+ ".": "4.42.0"
}
diff --git a/.stats.yml b/.stats.yml
index 9797002bf..49956282b 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 64
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-97c9a5f089049dc9eb5cee9475558049003e37e42202cab39e59d75e08b4c613.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-edb5af3ade0cd27cf366b0654b90c7a81c43c433e11fc3f6e621e2c779de10d4.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 39187ece0..98885d747 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog
+## 4.42.0 (2024-05-06)
+
+Full Changelog: [v4.41.1...v4.42.0](https://github.com/openai/openai-node/compare/v4.41.1...v4.42.0)
+
+### Features
+
+* **api:** add usage metadata when streaming ([#829](https://github.com/openai/openai-node/issues/829)) ([6707f11](https://github.com/openai/openai-node/commit/6707f119a191ad98d634ad208be852f9f39c6c0e))
+
+
+### Bug Fixes
+
+* **example:** fix fine tuning example ([#827](https://github.com/openai/openai-node/issues/827)) ([6480a50](https://github.com/openai/openai-node/commit/6480a506c096a2664bd2ad296481e51017ff4185))
+
## 4.41.1 (2024-05-06)
Full Changelog: [v4.41.0...v4.41.1](https://github.com/openai/openai-node/compare/v4.41.0...v4.41.1)
diff --git a/README.md b/README.md
index d911ccc63..e4c0ad332 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ You can import in Deno via:
```ts
-import OpenAI from 'https://deno.land/x/openai@v4.41.1/mod.ts';
+import OpenAI from 'https://deno.land/x/openai@v4.42.0/mod.ts';
```
diff --git a/api.md b/api.md
index c1ac8cfbd..18cdd9e62 100644
--- a/api.md
+++ b/api.md
@@ -41,6 +41,7 @@ Types:
- ChatCompletionMessageToolCall
- ChatCompletionNamedToolChoice
- ChatCompletionRole
+- ChatCompletionStreamOptions
- ChatCompletionSystemMessageParam
- ChatCompletionTokenLogprob
- ChatCompletionTool
diff --git a/examples/fine-tuning-data.jsonl b/examples/fine-tuning-data.jsonl
index 4eace9252..e9ab2679f 100644
--- a/examples/fine-tuning-data.jsonl
+++ b/examples/fine-tuning-data.jsonl
@@ -1,10 +1,10 @@
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the capital of France?"}, {"role": "assistant", "content": "Paris, as if everyone doesn't know that already."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who wrote 'Romeo and Juliet'?"}, {"role": "assistant", "content": "Oh, just some guy named William Shakespeare. Ever heard of him?"}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "How far is the Moon from Earth?"}, {"role": "assistant", "content": "Around 384,400 kilometers. Give or take a few, like that really matters."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the tallest mountain in the world?"}, {"role": "assistant", "content": "Everest, of course. Because no other mountain even comes close!"}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who was the first President of the United States?"}, {"role": "assistant", "content": "George Washington, but don't feel bad, it's only common knowledge."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the boiling point of water?"}, {"role": "assistant", "content": "100°C at sea level. Shocking, isn't it?"}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the largest ocean on Earth?"}, {"role": "assistant", "content": "The Pacific Ocean. Surprised? Me neither."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the smallest planet in our solar system?"}, {"role": "assistant", "content": "Mercury. It's not like it's a big deal or anything."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who discovered penicillin?"}, {"role": "assistant", "content": "Alexander Fleming. But don't worry, it's not like he's famous for it or anything."}]}
-{"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "When was the internet invented?"}, {"role": "assistant", "content": "In the 1960s, but hey, who's counting?"}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the capital of France?"}, {"role": "assistant", "content": "Paris, as if everyone doesn't know that already."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who wrote 'Romeo and Juliet'?"}, {"role": "assistant", "content": "Oh, just some guy named William Shakespeare. Ever heard of him?"}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "How far is the Moon from Earth?"}, {"role": "assistant", "content": "Around 384,400 kilometers. Give or take a few, like that really matters."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the tallest mountain in the world?"}, {"role": "assistant", "content": "Everest, of course. Because no other mountain even comes close!"}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who was the first President of the United States?"}, {"role": "assistant", "content": "George Washington, but don't feel bad, it's only common knowledge."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the boiling point of water?"}, {"role": "assistant", "content": "100°C at sea level. Shocking, isn't it?"}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the largest ocean on Earth?"}, {"role": "assistant", "content": "The Pacific Ocean. Surprised? Me neither."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the smallest planet in our solar system?"}, {"role": "assistant", "content": "Mercury. It's not like it's a big deal or anything."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who discovered penicillin?"}, {"role": "assistant", "content": "Alexander Fleming. But don't worry, it's not like he's famous for it or anything."}]}
+{"messages": [{"role": "system", "content": "Marvin is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "When was the internet invented?"}, {"role": "assistant", "content": "In the 1960s, but hey, who's counting?"}]}
diff --git a/examples/fine-tuning.ts b/examples/fine-tuning.ts
index 379eb8fc4..412fc6ada 100755
--- a/examples/fine-tuning.ts
+++ b/examples/fine-tuning.ts
@@ -49,7 +49,7 @@ async function main() {
const events: Record = {};
- while (fineTune.status == 'running' || fineTune.status == 'created') {
+ while (fineTune.status == 'running' || fineTune.status == 'queued') {
fineTune = await client.fineTuning.jobs.retrieve(fineTune.id);
console.log(`${fineTune.status}`);
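For context on this fix: the fine-tuning job status enum has no `created` value, so the old loop condition never matched a fresh job; jobs report `validating_files` or `queued` before moving to `running`. A minimal polling sketch along those lines (illustrative only, not the repo's exact example file; assumes `OPENAI_API_KEY` is set):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

// Poll a fine-tuning job until it leaves the non-terminal states.
async function waitForFineTune(jobId: string) {
  let job = await client.fineTuning.jobs.retrieve(jobId);
  while (job.status === 'validating_files' || job.status === 'queued' || job.status === 'running') {
    console.log(job.status);
    await new Promise((resolve) => setTimeout(resolve, 5000));
    job = await client.fineTuning.jobs.retrieve(jobId);
  }
  return job; // 'succeeded', 'failed', or 'cancelled'
}
```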
diff --git a/package.json b/package.json
index 86b5bfc11..97854fcd8 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "openai",
- "version": "4.41.1",
+ "version": "4.42.0",
"description": "The official TypeScript library for the OpenAI API",
"author": "OpenAI ",
"types": "dist/index.d.ts",
diff --git a/scripts/build-deno b/scripts/build-deno
index 5216721fc..358ed3685 100755
--- a/scripts/build-deno
+++ b/scripts/build-deno
@@ -16,7 +16,7 @@ This is a build produced from https://github.com/openai/openai-node – please g
Usage:
\`\`\`ts
-import OpenAI from "https://deno.land/x/openai@v4.41.1/mod.ts";
+import OpenAI from "https://deno.land/x/openai@v4.42.0/mod.ts";
const client = new OpenAI();
\`\`\`
diff --git a/src/index.ts b/src/index.ts
index 438a46779..b146a7bab 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -251,6 +251,7 @@ export namespace OpenAI {
export import ChatCompletionMessageToolCall = API.ChatCompletionMessageToolCall;
export import ChatCompletionNamedToolChoice = API.ChatCompletionNamedToolChoice;
export import ChatCompletionRole = API.ChatCompletionRole;
+ export import ChatCompletionStreamOptions = API.ChatCompletionStreamOptions;
export import ChatCompletionSystemMessageParam = API.ChatCompletionSystemMessageParam;
export import ChatCompletionTokenLogprob = API.ChatCompletionTokenLogprob;
export import ChatCompletionTool = API.ChatCompletionTool;
diff --git a/src/resources/chat/chat.ts b/src/resources/chat/chat.ts
index fa681ed64..ff271e5b4 100644
--- a/src/resources/chat/chat.ts
+++ b/src/resources/chat/chat.ts
@@ -45,6 +45,7 @@ export namespace Chat {
export import ChatCompletionMessageToolCall = CompletionsAPI.ChatCompletionMessageToolCall;
export import ChatCompletionNamedToolChoice = CompletionsAPI.ChatCompletionNamedToolChoice;
export import ChatCompletionRole = CompletionsAPI.ChatCompletionRole;
+ export import ChatCompletionStreamOptions = CompletionsAPI.ChatCompletionStreamOptions;
export import ChatCompletionSystemMessageParam = CompletionsAPI.ChatCompletionSystemMessageParam;
export import ChatCompletionTokenLogprob = CompletionsAPI.ChatCompletionTokenLogprob;
export import ChatCompletionTool = CompletionsAPI.ChatCompletionTool;
diff --git a/src/resources/chat/completions.ts b/src/resources/chat/completions.ts
index 467b33619..1098499b9 100644
--- a/src/resources/chat/completions.ts
+++ b/src/resources/chat/completions.ts
@@ -183,8 +183,9 @@ export interface ChatCompletionChunk {
id: string;
/**
- * A list of chat completion choices. Can be more than one if `n` is greater
- * than 1.
+ * A list of chat completion choices. Can contain more than one element if `n` is
+ * greater than 1. Can also be empty for the last chunk if you set
+ * `stream_options: {"include_usage": true}`.
*/
choices: Array<ChatCompletionChunk.Choice>;
@@ -210,6 +211,14 @@ export interface ChatCompletionChunk {
* backend changes have been made that might impact determinism.
*/
system_fingerprint?: string;
+
+ /**
+ * An optional field that will only be present when you set
+ * `stream_options: {"include_usage": true}` in your request. When present, it
+ * contains a null value except for the last chunk, which contains the token usage
+ * statistics for the entire request.
+ */
+ usage?: CompletionsAPI.CompletionUsage;
}
export namespace ChatCompletionChunk {
@@ -517,6 +526,19 @@ export namespace ChatCompletionNamedToolChoice {
*/
export type ChatCompletionRole = 'system' | 'user' | 'assistant' | 'tool' | 'function';
+/**
+ * Options for streaming response. Only set this when you set `stream: true`.
+ */
+export interface ChatCompletionStreamOptions {
+ /**
+ * If set, an additional chunk will be streamed before the `data: [DONE]` message.
+ * The `usage` field on this chunk shows the token usage statistics for the entire
+ * request, and the `choices` field will always be an empty array. All other chunks
+ * will also include a `usage` field, but with a null value.
+ */
+ include_usage?: boolean;
+}
+
export interface ChatCompletionSystemMessageParam {
/**
* The contents of the system message.
@@ -786,6 +808,11 @@ export interface ChatCompletionCreateParamsBase {
*/
stream?: boolean | null;
+ /**
+ * Options for streaming response. Only set this when you set `stream: true`.
+ */
+ stream_options?: ChatCompletionStreamOptions | null;
+
/**
* What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
* make the output more random, while lower values like 0.2 will make it more
@@ -949,6 +976,7 @@ export namespace Completions {
export import ChatCompletionMessageToolCall = ChatCompletionsAPI.ChatCompletionMessageToolCall;
export import ChatCompletionNamedToolChoice = ChatCompletionsAPI.ChatCompletionNamedToolChoice;
export import ChatCompletionRole = ChatCompletionsAPI.ChatCompletionRole;
+ export import ChatCompletionStreamOptions = ChatCompletionsAPI.ChatCompletionStreamOptions;
export import ChatCompletionSystemMessageParam = ChatCompletionsAPI.ChatCompletionSystemMessageParam;
export import ChatCompletionTokenLogprob = ChatCompletionsAPI.ChatCompletionTokenLogprob;
export import ChatCompletionTool = ChatCompletionsAPI.ChatCompletionTool;
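To illustrate the `stream_options` and `usage` additions above end to end, a minimal sketch (the model and prompt are placeholders, not taken from this diff; assumes `OPENAI_API_KEY` is set):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const stream = await client.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say hello.' }],
    stream: true,
    stream_options: { include_usage: true },
  });

  let usage: OpenAI.CompletionUsage | undefined;
  for await (const chunk of stream) {
    // The extra final chunk has an empty `choices` array and carries `usage`,
    // so guard the delta access and capture usage when it shows up.
    const delta = chunk.choices[0]?.delta?.content;
    if (delta) process.stdout.write(delta);
    if (chunk.usage) usage = chunk.usage;
  }
  console.log('\ntotal tokens:', usage?.total_tokens);
}

main();
```

Note that without `stream_options: { include_usage: true }` the `usage` field is simply absent from chunks, so existing stream consumers are unaffected.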
diff --git a/src/resources/chat/index.ts b/src/resources/chat/index.ts
index ef72bbbc9..2761385c2 100644
--- a/src/resources/chat/index.ts
+++ b/src/resources/chat/index.ts
@@ -14,6 +14,7 @@ export {
ChatCompletionMessageToolCall,
ChatCompletionNamedToolChoice,
ChatCompletionRole,
+ ChatCompletionStreamOptions,
ChatCompletionSystemMessageParam,
ChatCompletionTokenLogprob,
ChatCompletionTool,
diff --git a/src/resources/completions.ts b/src/resources/completions.ts
index b64c3a166..c37c6d802 100644
--- a/src/resources/completions.ts
+++ b/src/resources/completions.ts
@@ -4,6 +4,7 @@ import * as Core from 'openai/core';
import { APIPromise } from 'openai/core';
import { APIResource } from 'openai/resource';
import * as CompletionsAPI from 'openai/resources/completions';
+import * as ChatCompletionsAPI from 'openai/resources/chat/completions';
import { Stream } from 'openai/streaming';
export class Completions extends APIResource {
@@ -251,6 +252,11 @@ export interface CompletionCreateParamsBase {
*/
stream?: boolean | null;
+ /**
+ * Options for streaming response. Only set this when you set `stream: true`.
+ */
+ stream_options?: ChatCompletionsAPI.ChatCompletionStreamOptions | null;
+
/**
* The suffix that comes after a completion of inserted text.
*
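The legacy completions endpoint gains the same parameter via the shared `ChatCompletionStreamOptions` type. A similar sketch (placeholder model and prompt; assumes `OPENAI_API_KEY` is set):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const stream = await client.completions.create({
    model: 'gpt-3.5-turbo-instruct',
    prompt: 'Write a haiku about streams.',
    stream: true,
    stream_options: { include_usage: true },
  });

  for await (const chunk of stream) {
    // As with chat, the final chunk has no choices and carries `usage`.
    process.stdout.write(chunk.choices[0]?.text ?? '');
    if (chunk.usage) console.log('\ntotal tokens:', chunk.usage.total_tokens);
  }
}

main();
```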
diff --git a/src/version.ts b/src/version.ts
index afa714f81..c1a790c33 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '4.41.1'; // x-release-please-version
+export const VERSION = '4.42.0'; // x-release-please-version
diff --git a/tests/api-resources/chat/completions.test.ts b/tests/api-resources/chat/completions.test.ts
index bd398b91d..21277e1d6 100644
--- a/tests/api-resources/chat/completions.test.ts
+++ b/tests/api-resources/chat/completions.test.ts
@@ -39,6 +39,7 @@ describe('resource completions', () => {
seed: -9223372036854776000,
stop: 'string',
stream: false,
+ stream_options: { include_usage: true },
temperature: 1,
tool_choice: 'none',
tools: [
diff --git a/tests/api-resources/completions.test.ts b/tests/api-resources/completions.test.ts
index 2641bf7e3..3f6792447 100644
--- a/tests/api-resources/completions.test.ts
+++ b/tests/api-resources/completions.test.ts
@@ -35,6 +35,7 @@ describe('resource completions', () => {
seed: -9223372036854776000,
stop: '\n',
stream: false,
+ stream_options: { include_usage: true },
suffix: 'test.',
temperature: 1,
top_p: 1,