2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 68
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-fb9db2d2c1f0d6b39d8ee042db5d5c59acba6ad1daf47c18792c1f5fb24b3401.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-aa9b01fc0c17eb0cbc200533fc20d6a49c5e764ceaf8049e08b294532be6e9ff.yml

2 changes: 1 addition & 1 deletion batch.go
@@ -308,7 +308,7 @@ type BatchNewParams struct {
 // Your input file must be formatted as a
 // [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input),
 // and must be uploaded with the purpose `batch`. The file can contain up to 50,000
-// requests, and can be up to 100 MB in size.
+// requests, and can be up to 200 MB in size.
 InputFileID param.Field[string] `json:"input_file_id,required"`
 // Optional custom metadata for the batch.
 Metadata param.Field[map[string]string] `json:"metadata"`
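
The batch.go comment change tracks the Batch API raising its input-file limit from 100 MB to 200 MB. A minimal sketch of creating a batch with this SDK, not part of the PR: the endpoint and completion-window constant names are assumed from the SDK's usual naming pattern, and "file-abc123" is a placeholder ID.

// Sketch only: create a batch from a previously uploaded JSONL input file.
package main

import (
	"context"
	"log"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	client := openai.NewClient(option.WithAPIKey("my-api-key"))
	batch, err := client.Batches.New(context.TODO(), openai.BatchNewParams{
		// ID of a JSONL file uploaded with purpose `batch` (placeholder value).
		InputFileID:      openai.F("file-abc123"),
		Endpoint:         openai.F(openai.BatchNewParamsEndpointV1ChatCompletions),
		CompletionWindow: openai.F(openai.BatchNewParamsCompletionWindow24h),
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Println(batch.ID)
}
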
1 change: 1 addition & 0 deletions chat.go
@@ -35,6 +35,7 @@ const (
 ChatModelO1Mini ChatModel = "o1-mini"
 ChatModelO1Mini2024_09_12 ChatModel = "o1-mini-2024-09-12"
 ChatModelGPT4o ChatModel = "gpt-4o"
+ChatModelGPT4o2024_11_20 ChatModel = "gpt-4o-2024-11-20"
 ChatModelGPT4o2024_08_06 ChatModel = "gpt-4o-2024-08-06"
 ChatModelGPT4o2024_05_13 ChatModel = "gpt-4o-2024-05-13"
 ChatModelGPT4oRealtimePreview ChatModel = "gpt-4o-realtime-preview"
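
The chat.go hunk registers the gpt-4o-2024-11-20 snapshot as a ChatModel constant. A short sketch of pinning a request to it, assuming a client configured as in the batch sketch above; the helper function name here is hypothetical.

// Sketch only: pin a chat completion to the new snapshot constant.
// Requires the context and github.com/openai/openai-go imports.
func greetWithNovemberSnapshot(ctx context.Context, client *openai.Client) (string, error) {
	resp, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: openai.F(openai.ChatModelGPT4o2024_11_20),
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("Say hello."),
		}),
	})
	if err != nil {
		return "", err
	}
	return resp.Choices[0].Message.Content, nil
}
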
10 changes: 6 additions & 4 deletions chatcompletion.go
@@ -383,8 +383,9 @@ type ChatCompletionAudioParam struct {
 // Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`,
 // or `pcm16`.
 Format param.Field[ChatCompletionAudioParamFormat] `json:"format,required"`
-// The voice the model uses to respond. Supported voices are `alloy`, `ash`,
-// `ballad`, `coral`, `echo`, `sage`, `shimmer`, and `verse`.
+// The voice the model uses to respond. Supported voices are `ash`, `ballad`,
+// `coral`, `sage`, and `verse` (also supported but not recommended are `alloy`,
+// `echo`, and `shimmer`; these voices are less expressive).
 Voice param.Field[ChatCompletionAudioParamVoice] `json:"voice,required"`
 }

@@ -412,8 +413,9 @@ func (r ChatCompletionAudioParamFormat) IsKnown() bool {
 return false
 }

-// The voice the model uses to respond. Supported voices are `alloy`, `ash`,
-// `ballad`, `coral`, `echo`, `sage`, `shimmer`, and `verse`.
+// The voice the model uses to respond. Supported voices are `ash`, `ballad`,
+// `coral`, `sage`, and `verse` (also supported but not recommended are `alloy`,
+// `echo`, and `shimmer`; these voices are less expressive).
 type ChatCompletionAudioParamVoice string

 const (
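
The revised voice guidance matters when requesting spoken output. A rough sketch under the same client assumption; the audio-preview model, modality, format, and voice constant names follow the SDK's naming convention and are assumptions rather than lines from this diff.

// Sketch only: request audio output with one of the recommended voices (`ash`).
func speakWithAsh(ctx context.Context, client *openai.Client) error {
	_, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: openai.F(openai.ChatModelGPT4oAudioPreview),
		Modalities: openai.F([]openai.ChatCompletionModality{
			openai.ChatCompletionModalityText,
			openai.ChatCompletionModalityAudio,
		}),
		Audio: openai.F(openai.ChatCompletionAudioParam{
			Format: openai.F(openai.ChatCompletionAudioParamFormatWav),
			Voice:  openai.F(openai.ChatCompletionAudioParamVoiceAsh), // a recommended voice
		}),
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("Read this sentence aloud."),
		}),
	})
	return err
}
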
2 changes: 1 addition & 1 deletion file.go
@@ -55,7 +55,7 @@ func NewFileService(opts ...option.RequestOption) (r *FileService) {
 // [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
 // models.
 //
-// The Batch API only supports `.jsonl` files up to 100 MB in size. The input also
+// The Batch API only supports `.jsonl` files up to 200 MB in size. The input also
 // has a specific required
 // [format](https://platform.openai.com/docs/api-reference/batch/request-input).
 //
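
The file.go comment mirrors the same 200 MB ceiling on the upload side. A hedged sketch of uploading a batch input file, assuming FileNewParams accepts an io.Reader and that a FilePurposeBatch constant exists (both follow the SDK's usual pattern); the file name is a placeholder.

// Sketch only: upload a JSONL input file, then reuse its ID as InputFileID
// when creating the batch. Requires the context, io, os, and openai imports.
func uploadBatchInput(ctx context.Context, client *openai.Client) (string, error) {
	f, err := os.Open("batch_requests.jsonl") // placeholder local file
	if err != nil {
		return "", err
	}
	defer f.Close()

	uploaded, err := client.Files.New(ctx, openai.FileNewParams{
		File:    openai.F[io.Reader](f),
		Purpose: openai.F(openai.FilePurposeBatch),
	})
	if err != nil {
		return "", err
	}
	return uploaded.ID, nil
}
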