Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 80
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml
openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e
config_hash: 578c5bff4208d560c0c280f13324409f
config_hash: bcd2cacdcb9fae9938f273cd167f613c
2 changes: 1 addition & 1 deletion lib/openai/models/audio/transcription_create_params.rb
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ module Models
module Audio
# @see OpenAI::Resources::Audio::Transcriptions#create
#
# @see OpenAI::Resources::Audio::Transcriptions#stream_raw
# @see OpenAI::Resources::Audio::Transcriptions#create_streaming
class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel
# @!parse
# extend OpenAI::Internal::Type::RequestParameters::Converter
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/models/audio/transcription_create_response.rb
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ module Audio
#
# @see OpenAI::Resources::Audio::Transcriptions#create
#
# @see OpenAI::Resources::Audio::Transcriptions#stream_raw
# @see OpenAI::Resources::Audio::Transcriptions#create_streaming
module TranscriptionCreateResponse
extend OpenAI::Internal::Type::Union

Expand Down
2 changes: 1 addition & 1 deletion lib/openai/models/beta/threads/run.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ module Beta
module Threads
# @see OpenAI::Resources::Beta::Threads::Runs#create
#
# @see OpenAI::Resources::Beta::Threads::Runs#stream_raw
# @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw
class Run < OpenAI::Internal::Type::BaseModel
# @!attribute id
# The identifier, which can be referenced in API endpoints.
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/models/beta/threads/run_create_params.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ module Beta
module Threads
# @see OpenAI::Resources::Beta::Threads::Runs#create
#
# @see OpenAI::Resources::Beta::Threads::Runs#stream_raw
# @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw
class RunCreateParams < OpenAI::Internal::Type::BaseModel
# @!parse
# extend OpenAI::Internal::Type::RequestParameters::Converter
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ module Beta
module Threads
# @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs
#
# @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_stream_raw
# @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw
class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel
# @!parse
# extend OpenAI::Internal::Type::RequestParameters::Converter
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/models/completion.rb
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ module OpenAI
module Models
# @see OpenAI::Resources::Completions#create
#
# @see OpenAI::Resources::Completions#stream_raw
# @see OpenAI::Resources::Completions#create_streaming
class Completion < OpenAI::Internal::Type::BaseModel
# @!attribute id
# A unique identifier for the completion.
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/models/completion_create_params.rb
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ module OpenAI
module Models
# @see OpenAI::Resources::Completions#create
#
# @see OpenAI::Resources::Completions#stream_raw
# @see OpenAI::Resources::Completions#create_streaming
class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
# @!parse
# extend OpenAI::Internal::Type::RequestParameters::Converter
Expand Down
6 changes: 3 additions & 3 deletions lib/openai/resources/audio/transcriptions.rb
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class Transcriptions
def create(params)
parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params)
if parsed[:stream]
message = "Please use `#stream_raw` for the streaming use case."
message = "Please use `#create_streaming` for the streaming use case."
raise ArgumentError.new(message)
end
@client.request(
Expand All @@ -39,7 +39,7 @@ def create(params)

# Transcribes audio into the input language.
#
# @overload stream_raw(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
# @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
#
# @param file [IO, StringIO]
# @param model [String, Symbol, OpenAI::Models::AudioModel]
Expand All @@ -54,7 +54,7 @@ def create(params)
# @return [OpenAI::Internal::Stream<OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent>]
#
# @see OpenAI::Models::Audio::TranscriptionCreateParams
def stream_raw(params)
def create_streaming(params)
parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params)
unless parsed.fetch(:stream, true)
message = "Please use `#create` for the non-streaming use case."
Expand Down
12 changes: 6 additions & 6 deletions lib/openai/resources/beta/threads/runs.rb
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class Runs
def create(thread_id, params)
parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params)
if parsed[:stream]
message = "Please use `#stream_raw` for the streaming use case."
message = "Please use `#create_stream_raw` for the streaming use case."
raise ArgumentError.new(message)
end
query_params = [:include]
Expand All @@ -54,7 +54,7 @@ def create(thread_id, params)

# Create a run.
#
# @overload stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
# @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
#
# @param thread_id [String]
# @param assistant_id [String]
Expand All @@ -79,7 +79,7 @@ def create(thread_id, params)
# @return [OpenAI::Internal::Stream<OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent>]
#
# @see OpenAI::Models::Beta::Threads::RunCreateParams
def stream_raw(thread_id, params)
def create_stream_raw(thread_id, params)
parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params)
unless parsed.fetch(:stream, true)
message = "Please use `#create` for the non-streaming use case."
Expand Down Expand Up @@ -220,7 +220,7 @@ def cancel(run_id, params)
def submit_tool_outputs(run_id, params)
parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params)
if parsed[:stream]
message = "Please use `#submit_tool_stream_raw` for the streaming use case."
message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case."
raise ArgumentError.new(message)
end
thread_id =
Expand All @@ -241,7 +241,7 @@ def submit_tool_outputs(run_id, params)
# tool calls once they're all completed. All outputs must be submitted in a single
# request.
#
# @overload submit_tool_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {})
# @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {})
#
# @param run_id [String]
# @param thread_id [String]
Expand All @@ -251,7 +251,7 @@ def submit_tool_outputs(run_id, params)
# @return [OpenAI::Internal::Stream<OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent>]
#
# @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams
def submit_tool_stream_raw(run_id, params)
def submit_tool_outputs_stream_raw(run_id, params)
parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params)
unless parsed.fetch(:stream, true)
message = "Please use `#submit_tool_outputs` for the non-streaming use case."
Expand Down
6 changes: 3 additions & 3 deletions lib/openai/resources/completions.rb
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class Completions
def create(params)
parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params)
if parsed[:stream]
message = "Please use `#stream_raw` for the streaming use case."
message = "Please use `#create_streaming` for the streaming use case."
raise ArgumentError.new(message)
end
@client.request(
Expand All @@ -46,7 +46,7 @@ def create(params)

# Creates a completion for the provided prompt and parameters.
#
# @overload stream_raw(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {})
# @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {})
#
# @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model]
# @param prompt [String, Array<String>, Array<Integer>, Array<Array<Integer>>, nil]
Expand All @@ -70,7 +70,7 @@ def create(params)
# @return [OpenAI::Internal::Stream<OpenAI::Models::Completion>]
#
# @see OpenAI::Models::CompletionCreateParams
def stream_raw(params)
def create_streaming(params)
parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params)
unless parsed.fetch(:stream, true)
message = "Please use `#create` for the non-streaming use case."
Expand Down
10 changes: 5 additions & 5 deletions rbi/lib/openai/resources/audio/transcriptions.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ module OpenAI
# is no additional latency for segment timestamps, but generating word timestamps
# incurs additional latency.
timestamp_granularities: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_streaming` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: false,
request_options: {}
)
Expand Down Expand Up @@ -89,7 +89,7 @@ module OpenAI
]
)
end
def stream_raw(
def create_streaming(
# The audio file object (not file name) to transcribe, in one of these formats:
# flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
file:,
Expand Down Expand Up @@ -128,8 +128,8 @@ module OpenAI
# is no additional latency for segment timestamps, but generating word timestamps
# incurs additional latency.
timestamp_granularities: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_streaming` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: true,
request_options: {}
)
Expand Down
22 changes: 12 additions & 10 deletions rbi/lib/openai/resources/beta/threads/runs.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -170,8 +170,8 @@ module OpenAI
# Body param: Controls for how a thread will be truncated prior to the run. Use
# this to control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: false,
request_options: {}
)
Expand Down Expand Up @@ -259,7 +259,7 @@ module OpenAI
]
)
end
def stream_raw(
def create_stream_raw(
# Path param: The ID of the thread to run.
thread_id,
# Body param: The ID of the
Expand Down Expand Up @@ -368,8 +368,8 @@ module OpenAI
# Body param: Controls for how a thread will be truncated prior to the run. Use
# this to control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: true,
request_options: {}
)
Expand Down Expand Up @@ -497,8 +497,9 @@ module OpenAI
thread_id:,
# Body param: A list of tools for which the outputs are being submitted.
tool_outputs:,
# There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or
# `#submit_tool_outputs` for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use
# `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and
# non-streaming use cases, respectively.
stream: false,
request_options: {}
)
Expand Down Expand Up @@ -547,7 +548,7 @@ module OpenAI
]
)
end
def submit_tool_stream_raw(
def submit_tool_outputs_stream_raw(
# Path param: The ID of the run that requires the tool output submission.
run_id,
# Path param: The ID of the
Expand All @@ -556,8 +557,9 @@ module OpenAI
thread_id:,
# Body param: A list of tools for which the outputs are being submitted.
tool_outputs:,
# There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or
# `#submit_tool_outputs` for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use
# `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and
# non-streaming use cases, respectively.
stream: true,
request_options: {}
)
Expand Down
10 changes: 5 additions & 5 deletions rbi/lib/openai/resources/completions.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,8 @@ module OpenAI
# and detect abuse.
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
user: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_streaming` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: false,
request_options: {}
)
Expand Down Expand Up @@ -178,7 +178,7 @@ module OpenAI
)
.returns(OpenAI::Internal::Stream[OpenAI::Models::Completion])
end
def stream_raw(
def create_streaming(
# ID of the model to use. You can use the
# [List models](https://platform.openai.com/docs/api-reference/models/list) API to
# see all of your available models, or see our
Expand Down Expand Up @@ -282,8 +282,8 @@ module OpenAI
# and detect abuse.
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
user: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
# for streaming and non-streaming use cases, respectively.
# There is no need to provide `stream:`. Instead, use `#create_streaming` or
# `#create` for streaming and non-streaming use cases, respectively.
stream: true,
request_options: {}
)
Expand Down
2 changes: 1 addition & 1 deletion sig/openai/resources/audio/transcriptions.rbs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ module OpenAI
?request_options: OpenAI::request_opts
) -> OpenAI::Models::Audio::transcription_create_response

def stream_raw: (
def create_streaming: (
file: IO | StringIO,
model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
?include: ::Array[OpenAI::Models::Audio::transcription_include],
Expand Down
Loading