1 change: 1 addition & 0 deletions lib/openai/models/audio/speech_create_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Audio
# @see OpenAI::Resources::Audio::Speech#create
class SpeechCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
3 changes: 3 additions & 0 deletions lib/openai/models/audio/transcription_create_params.rb
@@ -3,6 +3,9 @@
module OpenAI
module Models
module Audio
# @see OpenAI::Resources::Audio::Transcriptions#create
#
# @see OpenAI::Resources::Audio::Transcriptions#create_streaming
class TranscriptionCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/audio/translation_create_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Audio
# @see OpenAI::Resources::Audio::Translations#create
class TranslationCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
3 changes: 3 additions & 0 deletions lib/openai/models/batch.rb
@@ -225,6 +225,8 @@ class Batch < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void

# The current status of the batch.
#
# @see OpenAI::Models::Batch#status
module Status
extend OpenAI::Enum

@@ -244,6 +246,7 @@ module Status
# def self.values; end
end

# @see OpenAI::Models::Batch#errors
class Errors < OpenAI::BaseModel
# @!attribute [r] data
#
1 change: 1 addition & 0 deletions lib/openai/models/batch_cancel_params.rb
@@ -2,6 +2,7 @@

module OpenAI
module Models
# @see OpenAI::Resources::Batches#cancel
class BatchCancelParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/batch_create_params.rb
@@ -2,6 +2,7 @@

module OpenAI
module Models
# @see OpenAI::Resources::Batches#create
class BatchCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/batch_list_params.rb
@@ -2,6 +2,7 @@

module OpenAI
module Models
# @see OpenAI::Resources::Batches#list
class BatchListParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/batch_retrieve_params.rb
@@ -2,6 +2,7 @@

module OpenAI
module Models
# @see OpenAI::Resources::Batches#retrieve
class BatchRetrieveParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
3 changes: 3 additions & 0 deletions lib/openai/models/beta/assistant.rb
@@ -160,6 +160,7 @@ class Assistant < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::Assistant#tool_resources
class ToolResources < OpenAI::BaseModel
# @!attribute [r] code_interpreter
#
@@ -192,6 +193,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -213,6 +215,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::Assistant::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The ID of the
6 changes: 6 additions & 0 deletions lib/openai/models/beta/assistant_create_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Assistants#create
class AssistantCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
@@ -208,6 +209,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -229,6 +231,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The
@@ -312,6 +315,8 @@ class VectorStore < OpenAI::BaseModel

# The chunking strategy used to chunk the file(s). If not set, will use the `auto`
# strategy.
#
# @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy
module ChunkingStrategy
extend OpenAI::Union

@@ -363,6 +368,7 @@ class Static < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static
class Static < OpenAI::BaseModel
# @!attribute chunk_overlap_tokens
# The number of tokens that overlap between chunks. The default value is `400`.
1 change: 1 addition & 0 deletions lib/openai/models/beta/assistant_delete_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Assistants#delete
class AssistantDeleteParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/beta/assistant_list_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Assistants#list
class AssistantListParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
1 change: 1 addition & 0 deletions lib/openai/models/beta/assistant_retrieve_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Assistants#retrieve
class AssistantRetrieveParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
2 changes: 2 additions & 0 deletions lib/openai/models/beta/assistant_tool_choice.rb
@@ -31,6 +31,8 @@ class AssistantToolChoice < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void

# The type of the tool. If type is `function`, the function name must be set
#
# @see OpenAI::Models::Beta::AssistantToolChoice#type
module Type
extend OpenAI::Enum

3 changes: 3 additions & 0 deletions lib/openai/models/beta/assistant_update_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Assistants#update
class AssistantUpdateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
@@ -304,6 +305,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# Overrides the list of
@@ -326,6 +328,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# Overrides the
4 changes: 4 additions & 0 deletions lib/openai/models/beta/file_search_tool.rb
@@ -28,6 +28,7 @@ class FileSearchTool < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::FileSearchTool#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] max_num_results
# The maximum number of results the file search tool should output. The default is
@@ -71,6 +72,7 @@ class FileSearch < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options
class RankingOptions < OpenAI::BaseModel
# @!attribute score_threshold
# The score threshold for the file search. All values must be a floating point
@@ -107,6 +109,8 @@ class RankingOptions < OpenAI::BaseModel

# The ranker to use for the file search. If not specified will use the `auto`
# ranker.
#
# @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker
module Ranker
extend OpenAI::Enum

3 changes: 3 additions & 0 deletions lib/openai/models/beta/thread.rb
@@ -56,6 +56,7 @@ class Thread < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::Thread#tool_resources
class ToolResources < OpenAI::BaseModel
# @!attribute [r] code_interpreter
#
@@ -88,6 +89,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -109,6 +111,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::Thread::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The
17 changes: 17 additions & 0 deletions lib/openai/models/beta/thread_create_and_run_params.rb
@@ -3,6 +3,9 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Threads#create_and_run
#
# @see OpenAI::Resources::Beta::Threads#create_and_run_streaming
class ThreadCreateAndRunParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
@@ -322,6 +325,8 @@ class Message < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void

# The text contents of the message.
#
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content
module Content
extend OpenAI::Union

@@ -345,6 +350,8 @@ module Content
# most cases to represent user-generated messages.
# - `assistant`: Indicates the message is generated by the assistant. Use this
# value to insert messages from the assistant into the conversation.
#
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role
module Role
extend OpenAI::Enum

@@ -420,6 +427,7 @@ class FileSearch < OpenAI::BaseModel
end
end

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources
class ToolResources < OpenAI::BaseModel
# @!attribute [r] code_interpreter
#
@@ -454,6 +462,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -475,6 +484,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The
@@ -558,6 +568,8 @@ class VectorStore < OpenAI::BaseModel

# The chunking strategy used to chunk the file(s). If not set, will use the `auto`
# strategy.
#
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy
module ChunkingStrategy
extend OpenAI::Union

@@ -609,6 +621,7 @@ class Static < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static
class Static < OpenAI::BaseModel
# @!attribute chunk_overlap_tokens
# The number of tokens that overlap between chunks. The default value is `400`.
@@ -677,6 +690,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -698,6 +712,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The ID of the
@@ -767,6 +782,8 @@ class TruncationStrategy < OpenAI::BaseModel
# `last_messages`, the thread will be truncated to the n most recent messages in
# the thread. When set to `auto`, messages in the middle of the thread will be
# dropped to fit the context length of the model, `max_prompt_tokens`.
#
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type
module Type
extend OpenAI::Enum

10 changes: 10 additions & 0 deletions lib/openai/models/beta/thread_create_params.rb
@@ -3,6 +3,7 @@
module OpenAI
module Models
module Beta
# @see OpenAI::Resources::Beta::Threads#create
class ThreadCreateParams < OpenAI::BaseModel
# @!parse
# extend OpenAI::Type::RequestParameters::Converter
@@ -97,6 +98,8 @@ class Message < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void

# The text contents of the message.
#
# @see OpenAI::Models::Beta::ThreadCreateParams::Message#content
module Content
extend OpenAI::Union

@@ -120,6 +123,8 @@ module Content
# most cases to represent user-generated messages.
# - `assistant`: Indicates the message is generated by the assistant. Use this
# value to insert messages from the assistant into the conversation.
#
# @see OpenAI::Models::Beta::ThreadCreateParams::Message#role
module Role
extend OpenAI::Enum

@@ -228,6 +233,7 @@ class ToolResources < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::BaseModel
# @!attribute [r] file_ids
# A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -249,6 +255,7 @@ class CodeInterpreter < OpenAI::BaseModel
# def initialize: (Hash | OpenAI::BaseModel) -> void
end

# @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search
class FileSearch < OpenAI::BaseModel
# @!attribute [r] vector_store_ids
# The
@@ -332,6 +339,8 @@ class VectorStore < OpenAI::BaseModel

# The chunking strategy used to chunk the file(s). If not set, will use the `auto`
# strategy.
#
# @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy
module ChunkingStrategy
extend OpenAI::Union

@@ -383,6 +392,7 @@ class Static < OpenAI::BaseModel

# def initialize: (Hash | OpenAI::BaseModel) -> void

# @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static
class Static < OpenAI::BaseModel
# @!attribute chunk_overlap_tokens
# The number of tokens that overlap between chunks. The default value is `400`.