diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index ac4346e0..bc60292b 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + params(_: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) end def logprobs=(_) end @@ -30,7 +30,10 @@ module OpenAI # Represents a transcription response returned by model, based on the provided # input. sig do - params(text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + params( + text: String, + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)] + ) .returns(T.attached_class) end def self.new(text:, logprobs: nil) diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi index 6c73838b..0f931bf2 100644 --- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -30,8 +30,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) + params( + _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + ) end def logprobs=(_) end @@ -43,7 +47,7 @@ module OpenAI sig do params( delta: String, - logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index fb616718..044cbf7c 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -31,8 +31,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) + params( + _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + ) end def logprobs=(_) end @@ -44,7 +48,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 55eb1b23..aba3ac2a 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -37,8 +37,8 @@ module OpenAI end sig do - params(_: 
T::Array[OpenAI::Models::Audio::TranscriptionSegment]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionSegment]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) end def segments=(_) end @@ -49,8 +49,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionWord]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionWord]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) end def words=(_) end @@ -62,8 +62,8 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment], - words: T::Array[OpenAI::Models::Audio::TranscriptionWord] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)], + words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index bceb7944..8bceffaa 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -37,8 +37,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionSegment]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionSegment]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) end def segments=(_) end @@ -48,7 +48,7 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index acb1dcad..c86180e1 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -201,7 +201,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Models::Batch::Status::TaggedSymbol, + status: OpenAI::Models::Batch::Status::OrSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -301,7 +301,10 @@ module OpenAI def data end - sig { params(_: T::Array[OpenAI::Models::BatchError]).returns(T::Array[OpenAI::Models::BatchError]) } + sig do + params(_: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) + end def data=(_) end @@ -314,7 +317,10 @@ module OpenAI def object=(_) end - sig { params(data: T::Array[OpenAI::Models::BatchError], object: String).returns(T.attached_class) } + sig do + params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)], object: String) + .returns(T.attached_class) + end def self.new(data: nil, object: nil) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index a15e202a..8dd6289e 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -238,6 +238,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + 
OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -246,6 +247,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 8e5c3a9b..0251dc51 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -196,6 +196,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -205,6 +206,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -239,6 +241,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -248,6 +251,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -429,8 +433,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) - .returns(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) end def vector_stores=(_) end @@ -438,7 +456,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -477,12 +500,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -521,6 +546,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 8fc4069c..b266b2e2 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -196,6 +196,7 @@ 
module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -205,6 +206,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -239,6 +241,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -248,6 +251,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index dc7a1570..b44e4bcf 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -330,6 +330,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -339,7 +340,8 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), @@ -347,6 +349,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -450,8 +453,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]) + params( + _: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + ) end def messages=(_) end @@ -497,7 +504,7 @@ module OpenAI # an empty thread will be created. 
sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable( T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) @@ -621,13 +628,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) @@ -761,6 +771,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -769,6 +780,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -783,6 +795,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -980,10 +993,20 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + _: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns( - T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) end def vector_stores=(_) @@ -992,7 +1015,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -1031,12 +1059,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -1075,6 +1105,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + 
OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 165cc27d..4fab7a4b 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -14,8 +14,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]) + params(_: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) end def messages=(_) end @@ -53,7 +53,7 @@ module OpenAI sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) @@ -175,13 +175,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) @@ -310,6 +313,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -318,6 +322,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -332,6 +337,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -506,8 +512,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) end def vector_stores=(_) end @@ -515,7 +535,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -554,12 +579,14 @@ 
module OpenAI params( _: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -598,6 +625,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 2d687b40..3d650791 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -12,8 +12,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) end def detail=(_) end @@ -30,7 +30,7 @@ module OpenAI end sig do - params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String) + params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) .returns(T.attached_class) end def self.new(detail: nil, file_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 1363acdb..ac2e9bb6 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -12,8 +12,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) end def detail=(_) end @@ -29,7 +29,7 @@ module OpenAI end sig do - params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String) + params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String) .returns(T.attached_class) end def self.new(detail: nil, url: nil) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 6ea4f061..63ead9a1 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -196,11 +196,12 @@ module OpenAI params( id: String, assistant_id: T.nilable(String), - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment]), + attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Util::AnyHash)]), completed_at: T.nilable(Integer), content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, 
OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock @@ -210,9 +211,9 @@ module OpenAI incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), - role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, + role: OpenAI::Models::Beta::Threads::Message::Role::OrSymbol, run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::Message::Status::OrSymbol, thread_id: String, object: Symbol ) @@ -298,6 +299,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -306,6 +308,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -320,6 +323,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -406,7 +410,7 @@ module OpenAI # On an incomplete message, details about why the message is incomplete. sig do - params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason:) diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 74d7c35b..350fd7ae 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -105,13 +105,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) @@ -242,6 +245,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -250,6 +254,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -264,6 +269,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index c396bfdb..0e044cb6 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ 
-28,6 +28,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -38,6 +39,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -54,8 +56,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) end def role=(_) end @@ -66,12 +68,13 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index a89a0fe4..c6efb6d4 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -426,22 +426,25 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::RunStatus::OrSymbol, thread_id: String, tool_choice: T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -548,8 +551,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) end def reason=(_) end @@ -557,7 +560,7 @@ module OpenAI # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. sig do - params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason: nil) @@ -614,7 +617,7 @@ module OpenAI # The last error associated with this run. Will be `null` if there are no errors. 
sig do - params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) @@ -711,7 +714,9 @@ module OpenAI # Details on the tool outputs needed for this run to continue. sig do - params(tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) + params( + tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Util::AnyHash)] + ) .returns(T.attached_class) end def self.new(tool_calls:) @@ -753,7 +758,7 @@ module OpenAI # control the intial context window of the run. sig do params( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, last_messages: T.nilable(Integer) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 83e052a8..49b45721 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -350,7 +350,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -362,6 +364,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -370,13 +373,15 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -566,13 +571,21 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), + attachments: T.nilable( + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, + OpenAI::Util::AnyHash + ) + ] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) @@ -707,6 +720,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -715,6 +729,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, 
OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -729,6 +744,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index ad431d15..aad29cff 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -31,7 +31,7 @@ module OpenAI sig do params( thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 98f88637..7dcb0cba 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -129,6 +129,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 1b18df27..244ab852 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -119,6 +119,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -127,6 +128,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -142,6 +144,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 432b6002..ae88c21f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -88,8 +88,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]) + params( + _: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + ) end def results=(_) end @@ -101,7 +105,7 @@ module OpenAI 
OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, OpenAI::Util::AnyHash ), - results: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + results: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end @@ -155,7 +159,7 @@ module OpenAI # The ranking options for the file search. sig do params( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::OrSymbol, score_threshold: Float ) .returns(T.attached_class) @@ -254,8 +258,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] + ) end def content=(_) end @@ -266,7 +284,12 @@ module OpenAI file_id: String, file_name: String, score: Float, - content: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + content: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -310,10 +333,10 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) end def type=(_) @@ -322,7 +345,7 @@ module OpenAI sig do params( text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 1a008d84..c2d4d482 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -210,13 +210,14 @@ module OpenAI last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::OrSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::OrSymbol, usage: 
T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)), object: Symbol ) @@ -296,7 +297,7 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. sig do - params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 3753e97f..c291b78b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -24,12 +24,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) .returns( T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) @@ -42,6 +44,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 3258b9d6..17d54160 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -39,6 +39,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -48,6 +49,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -63,6 +65,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 3ad8cc7f..d244c946 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -61,6 +61,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall ) diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 0ba8015c..eda70b49 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -53,6 +53,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathAnnotation ) ], diff 
--git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 12996683..d7051ba2 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -25,6 +25,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -33,6 +34,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -55,6 +57,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ], diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 6e2cd6d4..45ff9cca 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -94,10 +94,10 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Util::AnyHash)], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::OrSymbol), system_fingerprint: String, usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash), object: Symbol @@ -187,7 +187,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)), message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash) @@ -267,8 +267,8 @@ module OpenAI # Log probability information for the choice. 
sig do params( - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index de9d2442..260791cc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -127,8 +127,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) end def tool_calls=(_) end @@ -143,6 +143,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartRefusal ) ] @@ -153,7 +154,7 @@ module OpenAI ), name: String, refusal: T.nilable(String), - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)], role: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 802b0de2..e64d0cbc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -102,10 +102,10 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Util::AnyHash)], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::OrSymbol), system_fingerprint: String, usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)), object: Symbol @@ -200,7 +200,7 @@ module OpenAI sig do params( delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash), - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol), index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) ) @@ -265,8 +265,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) end def role=(_) end @@ -276,8 +276,12 @@ module OpenAI end sig do - params(_: 
T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]) + params( + _: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] + ) end def tool_calls=(_) end @@ -288,8 +292,8 @@ module OpenAI content: T.nilable(String), function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash), refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol, + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end @@ -407,8 +411,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) end def type=(_) end @@ -418,7 +422,7 @@ module OpenAI index: Integer, id: String, function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash), - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol ) .returns(T.attached_class) end @@ -552,8 +556,8 @@ module OpenAI # Log probability information for the choice. sig do params( - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 2ccc5505..216c218d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -40,7 +40,7 @@ module OpenAI # replace the previous `system` messages. 
sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), name: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index a2ea14b8..12d3f9c5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -38,8 +38,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) end def annotations=(_) end @@ -77,8 +77,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) end def tool_calls=(_) end @@ -88,10 +88,10 @@ module OpenAI params( content: T.nilable(String), refusal: T.nilable(String), - annotations: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], + annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)], audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash)), function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash), - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)], role: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 4996c9f4..472440af 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -32,7 +32,7 @@ module OpenAI # being regenerated. sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index f8cb806e..6c3b80e1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -40,7 +40,7 @@ module OpenAI # for this purpose instead. 
sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), name: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 1eae4294..ea88fd66 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -55,7 +55,7 @@ module OpenAI token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float, - top_logprobs: T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] + top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 42697bfe..bd333634 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -36,7 +36,7 @@ module OpenAI sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), tool_call_id: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 58bd1d57..a9e388f6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -82,6 +82,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 6f7c3948..cf85872a 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -131,13 +131,15 @@ module OpenAI params( _: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ) ) .returns( T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ) ) end @@ -152,8 +154,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]) - .returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]) + params(_: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) end def functions=(_) end @@ -337,6 +339,7 @@ module OpenAI params( _: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, 
OpenAI::Models::ResponseFormatJSONObject ) @@ -344,6 +347,7 @@ module OpenAI .returns( T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ) @@ -470,13 +474,15 @@ module OpenAI params( _: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ) ) .returns( T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ) ) end @@ -491,8 +497,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionTool]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionTool]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) end def tools=(_) end @@ -551,6 +557,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -563,9 +570,10 @@ module OpenAI frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -579,6 +587,7 @@ module OpenAI reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -590,9 +599,10 @@ module OpenAI temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ), - tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index a6536d37..e270ee5f 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -77,7 +77,7 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::CompletionChoice], + choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Util::AnyHash)], created: Integer, model: String, system_fingerprint: String, diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index cbf7bf7d..bdd63223 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ 
b/rbi/lib/openai/models/completion_choice.rbi @@ -47,7 +47,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::OrSymbol, index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash)), text: String diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 3f6d0c84..eaf9891d 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -31,7 +31,7 @@ module OpenAI # Combine multiple filters using `and` or `or`. sig do params( - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, T.anything)], type: OpenAI::Models::CompoundFilter::Type::OrSymbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index e6a54b86..e9a4af7f 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -44,7 +44,7 @@ module OpenAI sig do params( - data: T::Array[OpenAI::Models::Embedding], + data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Util::AnyHash)], model: String, usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash), object: Symbol diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index c96e406d..00ace88e 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -101,8 +101,8 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, - status: OpenAI::Models::FileObject::Status::TaggedSymbol, + purpose: OpenAI::Models::FileObject::Purpose::OrSymbol, + status: OpenAI::Models::FileObject::Status::OrSymbol, expires_at: Integer, status_details: String, object: Symbol diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 85b64f51..04cf51bd 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -221,12 +221,14 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::OrSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]), + integrations: T.nilable( + T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash), object: Symbol @@ -480,8 +482,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) end def type=(_) end @@ -491,7 +493,7 @@ 
module OpenAI params( dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash), supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash), - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index aeb14b83..372492b9 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -67,8 +67,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) end def type=(_) end @@ -78,10 +78,10 @@ module OpenAI params( id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::OrSymbol, message: String, data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol, object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index cc5d4db7..5c524237 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -145,7 +145,9 @@ module OpenAI model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash), - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), + integrations: T.nilable( + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash), seed: T.nilable(Integer), diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index e9537809..724883f8 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -19,7 +19,10 @@ module OpenAI def data=(_) end - sig { params(created: Integer, data: T::Array[OpenAI::Models::Image]).returns(T.attached_class) } + sig do + params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Util::AnyHash)]) + .returns(T.attached_class) + end def self.new(created:, data:) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index b589c1b7..26fc4f28 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -445,19 +445,19 @@ module OpenAI # A list of the categories along with the input type(s) that the score applies to. 
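A second recurring change: constructor params move from ...::TaggedSymbol to ...::OrSymbol, so a bare Ruby symbol type-checks where previously only the tagged enum constant did. A sketch against the fine-tuning job event signature above; the optional fields are assumed to default as generated, and :info is taken from the publicly documented event levels rather than from this hunk:

  event = OpenAI::Models::FineTuning::FineTuningJobEvent.new(
    id: "ftevent-hypothetical",
    created_at: Time.now.to_i,
    level: :info,            # OrSymbol: a plain symbol is enough; TaggedSymbol constants still work
    message: "job enqueued"
  )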
sig do params( - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], - sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] + harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::OrSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::OrSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::OrSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::OrSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::OrSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::OrSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::OrSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::OrSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::OrSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::OrSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::OrSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::OrSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::OrSymbol] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 282a33b9..591f644a 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -59,7 +59,7 @@ module OpenAI input: T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git 
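For ModerationCreateParams, the multi-modal input union now also admits hashes, so mixed text and image inputs can be written inline. A sketch, given a configured OpenAI::Client in client; the type/image_url/text field names follow the public moderations API and are assumptions here:

  result = client.moderations.create(
    model: "omni-moderation-latest",
    input: [
      {type: "text", text: "content to screen"},
      {type: "image_url", image_url: {url: "https://example.com/hypothetical.png"}}  # hash instead of ModerationImageURLInput
    ]
  )
  puts result.results.first.flagged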
a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index bf831d82..c3a9c5d8 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -31,7 +31,14 @@ module OpenAI end # Represents if a given text input is potentially harmful. - sig { params(id: String, model: String, results: T::Array[OpenAI::Models::Moderation]).returns(T.attached_class) } + sig do + params( + id: String, + model: String, + results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Util::AnyHash)] + ) + .returns(T.attached_class) + end def self.new(id:, model:, results:) end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index bf8bc082..18a3e4ee 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -89,6 +89,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index e2d118da..4380f086 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) - .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) + params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) + .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) end def filters=(_) end @@ -62,7 +62,7 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash), type: Symbol diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index bf617bc9..6aabb7ec 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -326,8 +326,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseStatus::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseStatus::OrSymbol) end def status=(_) end @@ -398,10 +398,11 @@ module OpenAI incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, 
OpenAI::Models::Responses::ResponseFunctionWebSearch, @@ -412,13 +413,15 @@ module OpenAI parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -428,9 +431,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), - status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseStatus::OrSymbol, text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol), usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash), user: String, object: Symbol @@ -522,15 +525,15 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) + .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) end def reason=(_) end # Details about why the response is incomplete. sig do - params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason: nil) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index cb72f53d..0e9fda65 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -86,10 +86,11 @@ module OpenAI results: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -184,7 +185,12 @@ module OpenAI # The output of a code interpreter tool call that is a file. 
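The Responses tooling unions get the same widening: FileSearchTool#filters now accepts OpenAI::Util::AnyHash alongside ComparisonFilter and CompoundFilter, so an inline hash filter type-checks. A sketch; the key/type/value filter fields follow the public vector-store filtering docs and are assumed, not defined in these hunks:

  tool = OpenAI::Models::Responses::FileSearchTool.new(
    vector_store_ids: ["vs_hypothetical"],
    filters: {type: "eq", key: "category", value: "blog"} # ComparisonFilter written as a plain hash
  )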
sig do params( - files: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + files: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, + OpenAI::Util::AnyHash + ) + ], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index d7f5e2c6..f3845301 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -117,6 +117,7 @@ module OpenAI id: String, action: T.any( OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, @@ -127,7 +128,7 @@ module OpenAI OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait ), call_id: String, - pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], + pending_safety_checks: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, OpenAI::Util::AnyHash)], status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) @@ -359,7 +360,7 @@ module OpenAI # A drag action. sig do params( - path: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], + path: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 9e28ddee..93d2029f 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -59,9 +59,21 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] ) - .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) + .returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] + ) end def acknowledged_safety_checks=(_) end @@ -73,8 +85,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) end def status=(_) end @@ -84,8 +96,13 @@ module OpenAI id: String, call_id: String, output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: 
OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol, + acknowledged_safety_checks: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index ac337005..b7e8bd20 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -66,7 +66,11 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), + part: T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index d6185fc4..566da131 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -66,7 +66,11 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), + part: T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 7e1109eb..9cf8f37d 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -254,6 +254,7 @@ module OpenAI _: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -261,6 +262,7 @@ module OpenAI T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -304,6 +306,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -314,6 +317,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -373,6 +377,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -400,11 +405,13 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, 
OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index b2a69187..8b28aae4 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -27,7 +27,7 @@ module OpenAI # An error object returned when the model fails to generate a Response. sig do - params(code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Responses::ResponseError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index e3a77e36..957f8061 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -64,7 +64,9 @@ module OpenAI id: String, queries: T::Array[String], status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, - results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]), + results: T.nilable( + T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Util::AnyHash)] + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index bb1f3e0a..f78c8d84 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -47,8 +47,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) end def status=(_) end @@ -58,7 +58,7 @@ module OpenAI id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 9d56daf4..74e9e516 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -115,6 +115,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -267,10 +268,20 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] 
) .returns( - T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] ) end def acknowledged_safety_checks=(_) @@ -295,7 +306,12 @@ module OpenAI call_id: String, output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), id: String, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], + acknowledged_safety_checks: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 1c80f8f2..e6579bda 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -71,8 +71,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) end def status=(_) end @@ -83,8 +83,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) end def type=(_) end @@ -95,13 +95,14 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 301dcc40..afdd9b6d 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -99,6 +99,7 @@ module OpenAI data: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 0e49a206..42d8f23b 100644 --- 
a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -68,6 +68,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index e53adef5..0e58ab9d 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -68,6 +68,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 681787d8..9b6bf634 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -68,7 +68,13 @@ module OpenAI sig do params( id: String, - content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ) + ], status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, type: Symbol diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 2b88cd02..5c365bc5 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -66,6 +66,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath ) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index b31a3e74..12f95019 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -52,7 +52,7 @@ module OpenAI sig do params( id: String, - summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], + summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Util::AnyHash)], status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index c557f685..2be76360 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -86,6 +86,7 @@ module OpenAI params( annotation: T.any( OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + 
OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath ), diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index 2287e496..a04a62b1 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -35,6 +35,7 @@ module OpenAI params( _: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) @@ -42,6 +43,7 @@ module OpenAI .returns( T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) @@ -59,6 +61,7 @@ module OpenAI params( format_: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 2ce22a6c..949a6347 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -101,7 +101,7 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Models::Upload::Status::TaggedSymbol, + status: OpenAI::Models::Upload::Status::OrSymbol, file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)), object: Symbol ) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 2cb7c946..a3dc0af1 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -127,7 +127,7 @@ module OpenAI last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: OpenAI::Models::VectorStore::Status::TaggedSymbol, + status: OpenAI::Models::VectorStore::Status::OrSymbol, usage_bytes: Integer, expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash), expires_at: T.nilable(Integer), diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index f6aad6ef..54ca2316 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -25,12 +25,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -88,6 +90,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash), diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 4145160a..40bccf18 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) - 
.returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) + params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) + .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) end def filters=(_) end @@ -61,7 +61,7 @@ module OpenAI sig do params( query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash), rewrite_query: T::Boolean, diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 22a47028..1847ed36 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -61,7 +61,7 @@ module OpenAI sig do params( attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - content: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], + content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Util::AnyHash)], file_id: String, filename: String, score: Float @@ -121,7 +121,7 @@ module OpenAI end sig do - params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) .returns(T.attached_class) end def self.new(text:, type:) diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 2f203007..1a242819 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -53,12 +53,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -72,6 +74,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index dac1b7e6..e84068ca 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -53,12 +53,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -72,6 +74,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), 
request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 6ddab8f0..7d337918 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -111,10 +111,18 @@ module OpenAI sig do params( - _: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + _: T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ) ) .returns( - T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ) ) end def chunking_strategy=(_) @@ -126,11 +134,15 @@ module OpenAI id: String, created_at: Integer, last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)), - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::OrSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject), + chunking_strategy: T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ), object: Symbol ) .returns(T.attached_class) @@ -192,10 +204,7 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
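For vector store file creation, chunking_strategy now takes a hash in place of AutoFileChunkingStrategyParam or StaticFileChunkingStrategyObjectParam. A sketch; the static-strategy field names follow the public vector-store API, and passing the vector store id positionally to client.vector_stores.files.create is an assumption about this SDK's resource methods rather than something shown here:

  file = client.vector_stores.files.create(
    "vs_hypothetical",                       # vector store id (positional here by assumption)
    file_id: "file-hypothetical",
    chunking_strategy: {
      type: "static",
      static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
    }
  )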
sig do - params( - code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, - message: String - ) + params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 47124b6e..e2b512ef 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -74,7 +74,7 @@ module OpenAI id: String, created_at: Integer, file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash), - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::OrSymbol, vector_store_id: String, object: Symbol ) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 3205f05d..5b415b26 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -17,15 +17,17 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -136,15 +138,17 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 65c67e5a..9e668eee 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -15,9 +15,9 @@ module OpenAI # Create a thread. 
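In the beta Assistants resource above, tools and tool_resources likewise accept hashes now. A sketch, with parameter shapes taken from the public Assistants beta API (an assumption beyond what the hunks themselves show):

  assistant = client.beta.assistants.create(
    model: "gpt-4o",
    instructions: "Answer using the attached files.",
    tools: [{type: "file_search"}],                                      # hash in place of OpenAI::Models::Beta::FileSearchTool
    tool_resources: {file_search: {vector_store_ids: ["vs_hypothetical"]}}
  )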
sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) @@ -62,7 +62,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) @@ -115,30 +115,35 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -254,30 +259,35 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + truncation_strategy: T.nilable( + 
T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index d83a3d17..24507cc6 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -14,13 +14,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 44b7f356..d797090c 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -16,7 +16,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -28,6 +30,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -36,20 +39,24 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -178,7 +185,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -190,6 +199,7 @@ module OpenAI T.any( Symbol, 
OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -198,20 +208,24 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -469,7 +483,7 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -500,7 +514,7 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 00b8c661..91d89d3d 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -30,6 +30,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -38,13 +39,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -53,11 +55,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), 
          presence_penalty: T.nilable(Float),
          reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
          response_format: T.any(
            OpenAI::Models::ResponseFormatText,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::ResponseFormatJSONSchema,
            OpenAI::Models::ResponseFormatJSONObject
          ),
@@ -65,17 +68,18 @@ module OpenAI
          service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol),
          stop: T.nilable(T.any(String, T::Array[String])),
          store: T.nilable(T::Boolean),
-         stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+         stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
          temperature: T.nilable(Float),
          tool_choice: T.any(
            OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
-           OpenAI::Models::Chat::ChatCompletionNamedToolChoice
+           OpenAI::Models::Chat::ChatCompletionNamedToolChoice,
+           OpenAI::Util::AnyHash
          ),
-         tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool],
+         tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)],
          top_logprobs: T.nilable(Integer),
          top_p: T.nilable(Float),
          user: String,
-         web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+         web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash),
          stream: T.noreturn,
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
        )
@@ -294,6 +298,7 @@ module OpenAI
          messages: T::Array[
            T.any(
              OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam,
+             OpenAI::Util::AnyHash,
              OpenAI::Models::Chat::ChatCompletionSystemMessageParam,
              OpenAI::Models::Chat::ChatCompletionUserMessageParam,
              OpenAI::Models::Chat::ChatCompletionAssistantMessageParam,
@@ -302,13 +307,14 @@ module OpenAI
            )
          ],
          model: T.any(String, OpenAI::Models::ChatModel::OrSymbol),
-         audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam),
+         audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)),
          frequency_penalty: T.nilable(Float),
          function_call: T.any(
            OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol,
-           OpenAI::Models::Chat::ChatCompletionFunctionCallOption
+           OpenAI::Models::Chat::ChatCompletionFunctionCallOption,
+           OpenAI::Util::AnyHash
          ),
-         functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+         functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)],
          logit_bias: T.nilable(T::Hash[Symbol, Integer]),
          logprobs: T.nilable(T::Boolean),
          max_completion_tokens: T.nilable(Integer),
@@ -317,11 +323,12 @@ module OpenAI
          modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]),
          n: T.nilable(Integer),
          parallel_tool_calls: T::Boolean,
-         prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent),
+         prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)),
          presence_penalty: T.nilable(Float),
          reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
          response_format: T.any(
            OpenAI::Models::ResponseFormatText,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::ResponseFormatJSONSchema,
            OpenAI::Models::ResponseFormatJSONObject
          ),
@@ -329,17 +336,18 @@ module OpenAI
          service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol),
          stop: T.nilable(T.any(String, T::Array[String])),
          store: T.nilable(T::Boolean),
-         stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+         stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
          temperature: T.nilable(Float),
          tool_choice: T.any(
            OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
-           OpenAI::Models::Chat::ChatCompletionNamedToolChoice
+           OpenAI::Models::Chat::ChatCompletionNamedToolChoice,
+           OpenAI::Util::AnyHash
          ),
-         tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool],
+         tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)],
          top_logprobs: T.nilable(Integer),
          top_p: T.nilable(Float),
          user: String,
-         web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+         web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash),
          stream: T.noreturn,
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
        )
diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi
index 62ca185f..64b4c2ef 100644
--- a/rbi/lib/openai/resources/completions.rbi
+++ b/rbi/lib/openai/resources/completions.rbi
@@ -25,7 +25,7 @@ module OpenAI
          presence_penalty: T.nilable(Float),
          seed: T.nilable(Integer),
          stop: T.nilable(T.any(String, T::Array[String])),
-         stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+         stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
          suffix: T.nilable(String),
          temperature: T.nilable(Float),
          top_p: T.nilable(Float),
@@ -168,7 +168,7 @@ module OpenAI
          presence_penalty: T.nilable(Float),
          seed: T.nilable(Integer),
          stop: T.nilable(T.any(String, T::Array[String])),
-         stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+         stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
          suffix: T.nilable(String),
          temperature: T.nilable(Float),
          top_p: T.nilable(Float),
diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi
index 6e2fc880..df149226 100644
--- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi
+++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi
@@ -19,10 +19,12 @@ module OpenAI
        params(
          model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol),
          training_file: String,
-         hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters,
-         integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]),
+         hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash),
+         integrations: T.nilable(
+           T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)]
+         ),
          metadata: T.nilable(T::Hash[Symbol, String]),
-         method_: OpenAI::Models::FineTuning::JobCreateParams::Method,
+         method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash),
          seed: T.nilable(Integer),
          suffix: T.nilable(String),
          validation_file: T.nilable(String),
diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi
index b6a3164f..0fd74373 100644
--- a/rbi/lib/openai/resources/moderations.rbi
+++ b/rbi/lib/openai/resources/moderations.rbi
@@ -10,7 +10,7 @@ module OpenAI
          input: T.any(
            String,
            T::Array[String],
-           T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]
+           T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)]
          ),
          model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol),
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi
index 7f02edbe..61ac293c 100644
--- a/rbi/lib/openai/resources/responses.rbi
+++ b/rbi/lib/openai/resources/responses.rbi
@@ -25,6 +25,7 @@ module OpenAI
          T::Array[
            T.any(
              OpenAI::Models::Responses::EasyInputMessage,
+             OpenAI::Util::AnyHash,
              OpenAI::Models::Responses::ResponseInputItem::Message,
              OpenAI::Models::Responses::ResponseOutputMessage,
              OpenAI::Models::Responses::ResponseFileSearchToolCall,
@@ -45,18 +46,20 @@ module OpenAI
          metadata: T.nilable(T::Hash[Symbol, String]),
          parallel_tool_calls: T.nilable(T::Boolean),
          previous_response_id: T.nilable(String),
-         reasoning: T.nilable(OpenAI::Models::Reasoning),
+         reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)),
          store: T.nilable(T::Boolean),
          temperature: T.nilable(Float),
-         text: OpenAI::Models::Responses::ResponseTextConfig,
+         text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash),
          tool_choice: T.any(
            OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
            OpenAI::Models::Responses::ToolChoiceTypes,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::Responses::ToolChoiceFunction
          ),
          tools: T::Array[
            T.any(
              OpenAI::Models::Responses::FileSearchTool,
+             OpenAI::Util::AnyHash,
              OpenAI::Models::Responses::FunctionTool,
              OpenAI::Models::Responses::ComputerTool,
              OpenAI::Models::Responses::WebSearchTool
@@ -200,6 +203,7 @@ module OpenAI
          T::Array[
            T.any(
              OpenAI::Models::Responses::EasyInputMessage,
+             OpenAI::Util::AnyHash,
              OpenAI::Models::Responses::ResponseInputItem::Message,
              OpenAI::Models::Responses::ResponseOutputMessage,
              OpenAI::Models::Responses::ResponseFileSearchToolCall,
@@ -220,18 +224,20 @@ module OpenAI
          metadata: T.nilable(T::Hash[Symbol, String]),
          parallel_tool_calls: T.nilable(T::Boolean),
          previous_response_id: T.nilable(String),
-         reasoning: T.nilable(OpenAI::Models::Reasoning),
+         reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)),
          store: T.nilable(T::Boolean),
          temperature: T.nilable(Float),
-         text: OpenAI::Models::Responses::ResponseTextConfig,
+         text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash),
          tool_choice: T.any(
            OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
            OpenAI::Models::Responses::ToolChoiceTypes,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::Responses::ToolChoiceFunction
          ),
          tools: T::Array[
            T.any(
              OpenAI::Models::Responses::FileSearchTool,
+             OpenAI::Util::AnyHash,
              OpenAI::Models::Responses::FunctionTool,
              OpenAI::Models::Responses::ComputerTool,
              OpenAI::Models::Responses::WebSearchTool
diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi
index dd0b6e05..6f4b48c7 100644
--- a/rbi/lib/openai/resources/vector_stores.rbi
+++ b/rbi/lib/openai/resources/vector_stores.rbi
@@ -16,9 +16,10 @@ module OpenAI
        params(
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
-         expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter,
+         expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash),
          file_ids: T::Array[String],
          metadata: T.nilable(T::Hash[Symbol, String]),
          name: String,
@@ -68,7 +69,7 @@ module OpenAI
      sig do
        params(
          vector_store_id: String,
-         expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter),
+         expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)),
          metadata: T.nilable(T::Hash[Symbol, String]),
          name: T.nilable(String),
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
@@ -146,9 +147,9 @@ module OpenAI
        params(
          vector_store_id: String,
          query: T.any(String, T::Array[String]),
-         filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter),
+         filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter),
          max_num_results: Integer,
-         ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions,
+         ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash),
          rewrite_query: T::Boolean,
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
        )
diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi
index b7a976f9..62bd27b1 100644
--- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi
+++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi
@@ -12,6 +12,7 @@ module OpenAI
          attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi
index 753f7397..37afdaf7 100644
--- a/rbi/lib/openai/resources/vector_stores/files.rbi
+++ b/rbi/lib/openai/resources/vector_stores/files.rbi
@@ -14,6 +14,7 @@ module OpenAI
          attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
+           OpenAI::Util::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
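
The recurring change in every hunk above is to widen a model-typed parameter to T.any(<Model>, OpenAI::Util::AnyHash), so Sorbet accepts a plain Ruby hash wherever a typed request object was previously required. A minimal sketch of a call that the widened chat-completions signature would now type-check; the client construction and literal values are illustrative assumptions, not taken from this diff:

# frozen_string_literal: true
require "openai"

# Hypothetical setup; how the client is built is outside the scope of this diff.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# With the T.any(..., OpenAI::Util::AnyHash) unions in
# rbi/lib/openai/resources/chat/completions.rbi, bare hashes now satisfy
# `messages:` and `tools:` alongside the typed param classes.
completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: "user", content: "Say hello"}],
  tools: [
    {
      type: "function",
      function: {name: "get_weather", parameters: {type: "object", properties: {}}}
    }
  ]
)

The same pattern applies to the other resources touched here (completions, fine-tuning jobs, moderations, responses, vector stores): any nested request object in those signatures can likewise be supplied as a hash under these types.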