Commit
feat(api): remove content_filter stop_reason and update documentation (#352)
stainless-bot committed Oct 5, 2023
1 parent 656cdf1 commit a4b401e
Showing 6 changed files with 34 additions and 27 deletions.
29 changes: 15 additions & 14 deletions src/resources/chat/completions.ts
@@ -99,7 +99,7 @@ export namespace ChatCompletion {
*/
export interface ChatCompletionChunk {
/**
- * A unique identifier for the chat completion chunk.
+ * A unique identifier for the chat completion. Each chunk has the same ID.
*/
id: string;

@@ -110,7 +110,8 @@ export interface ChatCompletionChunk {
choices: Array<ChatCompletionChunk.Choice>;

/**
- * The Unix timestamp (in seconds) of when the chat completion chunk was created.
+ * The Unix timestamp (in seconds) of when the chat completion was created. Each
+ * chunk has the same timestamp.
*/
created: number;

@@ -139,7 +140,7 @@ export namespace ChatCompletionChunk {
* content was omitted due to a flag from our content filters, or `function_call`
* if the model called a function.
*/
- finish_reason: 'stop' | 'length' | 'function_call' | 'content_filter' | null;
+ finish_reason: 'stop' | 'length' | 'function_call' | null;

/**
* The index of the choice in the list of choices.
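With `content_filter` dropped from the chunk-level union, streaming consumers only ever see the three remaining finish reasons. A minimal sketch of reading the stream with this SDK (the model, prompt, and switch body are illustrative, not part of this change):

```ts
import OpenAI from 'openai';

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

async function main() {
  const stream = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say hello' }],
    stream: true,
  });

  for await (const chunk of stream) {
    const choice = chunk.choices[0];
    process.stdout.write(choice?.delta?.content ?? '');

    // finish_reason stays null until the final chunk for this choice.
    switch (choice?.finish_reason) {
      case 'stop': // natural stop point or a stop sequence was hit
      case 'length': // max_tokens or the context limit was reached
      case 'function_call': // the model decided to call a function
        break;
    }
  }
}

main();
```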
@@ -300,7 +301,7 @@ export type ChatCompletionCreateParams =
export interface ChatCompletionCreateParamsBase {
/**
* A list of messages comprising the conversation so far.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models).
*/
messages: Array<ChatCompletionMessageParam>;

@@ -333,12 +334,12 @@ export interface ChatCompletionCreateParamsBase {
frequency_penalty?: number | null;

/**
- * Controls how the model responds to function calls. `none` means the model does
- * not call a function, and responds to the end-user. `auto` means the model can
- * pick between an end-user or calling a function. Specifying a particular function
- * via `{"name": "my_function"}` forces the model to call that function. `none` is
- * the default when no functions are present. `auto` is the default if functions
- * are present.
+ * Controls how the model calls functions. "none" means the model will not call a
+ * function and instead generates a message. "auto" means the model can pick
+ * between generating a message or calling a function. Specifying a particular
+ * function via `{"name": "my_function"}` forces the model to call that function.
+ * "none" is the default when no functions are present. "auto" is the default if
+ * functions are present.
*/
function_call?: 'none' | 'auto' | ChatCompletionCreateParams.FunctionCallOption;

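The reworded `function_call` doc keeps the same three modes: "none", "auto", or a named function. A short sketch of forcing a specific call; the `get_weather` function and its schema are hypothetical, included only for illustration:

```ts
import OpenAI from 'openai';

const openai = new OpenAI();

async function main() {
  const completion = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'What is the weather in Berlin?' }],
    functions: [
      {
        name: 'get_weather', // hypothetical function, for illustration only
        description: 'Look up the current weather for a city',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    ],
    // 'none' | 'auto' | { name: '...' }; here the call is forced.
    function_call: { name: 'get_weather' },
  });

  const message = completion.choices[0].message;
  if (message.function_call) {
    // Arguments arrive as a JSON-encoded string.
    const args = JSON.parse(message.function_call.arguments);
    console.log('model requested get_weather with', args);
  }
}

main();
```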
@@ -364,7 +365,7 @@ export interface ChatCompletionCreateParamsBase {
*
* The total length of input tokens and generated tokens is limited by the model's
* context length.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb)
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
* for counting tokens.
*/
max_tokens?: number | null;
@@ -394,7 +395,7 @@ export interface ChatCompletionCreateParamsBase {
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream?: boolean | null;

@@ -474,7 +475,7 @@ export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCr
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream?: false | null;
}
@@ -491,7 +492,7 @@ export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreat
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream: true;
}
8 changes: 4 additions & 4 deletions src/resources/completions.ts
@@ -205,7 +205,7 @@ export interface CompletionCreateParamsBase {
*
* The token count of your prompt plus `max_tokens` cannot exceed the model's
* context length.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb)
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
* for counting tokens.
*/
max_tokens?: number | null;
@@ -240,7 +240,7 @@ export interface CompletionCreateParamsBase {
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream?: boolean | null;

@@ -287,7 +287,7 @@ export interface CompletionCreateParamsNonStreaming extends CompletionCreatePara
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream?: false | null;
}
@@ -299,7 +299,7 @@ export interface CompletionCreateParamsStreaming extends CompletionCreateParamsB
* [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a `data: [DONE]`
* message.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
*/
stream: true;
}
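The same knobs exist on the legacy completions endpoint; a minimal non-streaming sketch with `max_tokens` (the model and prompt are illustrative):

```ts
import OpenAI from 'openai';

const openai = new OpenAI();

async function main() {
  const completion = await openai.completions.create({
    model: 'gpt-3.5-turbo-instruct',
    prompt: 'Write a one-line haiku about TypeScript.',
    max_tokens: 32, // prompt tokens + max_tokens must fit within the model's context length
  });

  console.log(completion.choices[0].text);
}

main();
```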
2 changes: 1 addition & 1 deletion src/resources/embeddings.ts
@@ -83,7 +83,7 @@ export interface EmbeddingCreateParams {
* inputs in a single request, pass an array of strings or array of token arrays.
* Each input must not exceed the max input tokens for the model (8191 tokens for
* `text-embedding-ada-002`) and cannot be an empty string.
- * [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb)
+ * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
* for counting tokens.
*/
input: string | Array<string> | Array<number> | Array<Array<number>>;
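As the `input` union suggests, one request can carry a single string or a batch of strings (or pre-tokenized arrays). A small sketch, assuming `text-embedding-ada-002`:

```ts
import OpenAI from 'openai';

const openai = new OpenAI();

async function main() {
  const response = await openai.embeddings.create({
    model: 'text-embedding-ada-002',
    // Each entry must stay under the model's 8191-token input limit.
    input: ['first document', 'second document'],
  });

  for (const item of response.data) {
    console.log(item.index, item.embedding.length); // 1536 dimensions for ada-002
  }
}

main();
```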
16 changes: 8 additions & 8 deletions src/resources/files.ts
@@ -10,10 +10,10 @@ import { Page } from 'openai/pagination';

export class Files extends APIResource {
/**
- * Upload a file that contains document(s) to be used across various
- * endpoints/features. Currently, the size of all the files uploaded by one
- * organization can be up to 1 GB. Please contact us if you need to increase the
- * storage limit.
+ * Upload a file that can be used across various endpoints/features. Currently, the
+ * size of all the files uploaded by one organization can be up to 1 GB. Please
+ * [contact us](https://help.openai.com/) if you need to increase the storage
+ * limit.
*/
create(body: FileCreateParams, options?: Core.RequestOptions): Core.APIPromise<FileObject> {
return this.post('/files', multipartFormRequestOptions({ body, ...options }));
@@ -143,19 +143,19 @@ export interface FileObject {

export interface FileCreateParams {
/**
- * Name of the [JSON Lines](https://jsonlines.readthedocs.io/en/latest/) file to be
- * uploaded.
+ * The file object (not file name) to be uploaded.
*
* If the `purpose` is set to "fine-tune", the file will be used for fine-tuning.
*/
file: Uploadable;

/**
- * The intended purpose of the uploaded documents.
+ * The intended purpose of the uploaded file.
*
* Use "fine-tune" for
* [fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). This
- * allows us to validate the format of the uploaded file.
+ * allows us to validate the format of the uploaded file is correct for
+ * fine-tuning.
*/
purpose: string;
}
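Since `file` takes a file object (an `Uploadable`) rather than a file name, uploads in this SDK typically pass a stream; a sketch with a hypothetical local path:

```ts
import fs from 'fs';
import OpenAI from 'openai';

const openai = new OpenAI();

async function main() {
  const file = await openai.files.create({
    // Any Uploadable works here: fs.ReadStream, a fetch Response, a File/Blob, ...
    file: fs.createReadStream('training_data.jsonl'), // hypothetical path
    purpose: 'fine-tune', // lets the API validate the JSONL format for fine-tuning
  });

  console.log(file.id, file.status);
}

main();
```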
3 changes: 3 additions & 0 deletions src/resources/fine-tunes.ts
@@ -204,6 +204,9 @@ export namespace FineTune {
}
}

+ /**
+ * Fine-tune event object
+ */
export interface FineTuneEvent {
created_at: number;

3 changes: 3 additions & 0 deletions src/resources/fine-tuning/jobs.ts
@@ -215,6 +215,9 @@ export namespace FineTuningJob {
}
}

+ /**
+ * Fine-tuning job event object
+ */
export interface FineTuningJobEvent {
id: string;

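The newly documented `FineTuningJobEvent` objects are what the job-events list returns; a short sketch of reading them, assuming the SDK's auto-pagination and a placeholder job ID:

```ts
import OpenAI from 'openai';

const openai = new OpenAI();

async function main() {
  // 'ftjob-abc123' is a placeholder; use a real fine-tuning job ID.
  for await (const event of openai.fineTuning.jobs.listEvents('ftjob-abc123')) {
    console.log(event.created_at, event.level, event.message);
  }
}

main();
```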
