Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
36 changes: 18 additions & 18 deletions lib/openai.rb
Original file line number Diff line number Diff line change
Expand Up @@ -36,27 +36,27 @@

# Package files.
require_relative "openai/version"
require_relative "openai/util"
require_relative "openai/type/converter"
require_relative "openai/type/unknown"
require_relative "openai/type/boolean_model"
require_relative "openai/type/enum"
require_relative "openai/type/union"
require_relative "openai/type/array_of"
require_relative "openai/type/hash_of"
require_relative "openai/type/base_model"
require_relative "openai/type/base_page"
require_relative "openai/type/base_stream"
require_relative "openai/type/request_parameters"
require_relative "openai/type"
require_relative "openai/internal/util"
require_relative "openai/internal/type/converter"
require_relative "openai/internal/type/unknown"
require_relative "openai/internal/type/boolean_model"
require_relative "openai/internal/type/enum"
require_relative "openai/internal/type/union"
require_relative "openai/internal/type/array_of"
require_relative "openai/internal/type/hash_of"
require_relative "openai/internal/type/base_model"
require_relative "openai/internal/type/base_page"
require_relative "openai/internal/type/base_stream"
require_relative "openai/internal/type/request_parameters"
require_relative "openai/aliases"
require_relative "openai/request_options"
require_relative "openai/errors"
require_relative "openai/transport/base_client"
require_relative "openai/transport/pooled_net_requester"
require_relative "openai/internal/transport/base_client"
require_relative "openai/internal/transport/pooled_net_requester"
require_relative "openai/client"
require_relative "openai/stream"
require_relative "openai/cursor_page"
require_relative "openai/page"
require_relative "openai/internal/stream"
require_relative "openai/internal/cursor_page"
require_relative "openai/internal/page"
require_relative "openai/models/reasoning_effort"
require_relative "openai/models/chat/chat_completion_message"
require_relative "openai/models/fine_tuning/fine_tuning_job_wandb_integration_object"
Expand Down
19 changes: 19 additions & 0 deletions lib/openai/aliases.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# frozen_string_literal: true

module OpenAI
# Backward-compatibility aliases: these type primitives moved under
# OpenAI::Internal::Type, but existing user code may still reference them
# at the top-level OpenAI namespace. Each constant below points at its
# new internal location so old references keep working.

# Sentinel type that accepts any value without coercion.
Unknown = OpenAI::Internal::Type::Unknown

# Type representing a true/false value.
BooleanModel = OpenAI::Internal::Type::BooleanModel

# Base type for enumerated string/symbol values.
Enum = OpenAI::Internal::Type::Enum

# Base type for values that may be one of several variants.
Union = OpenAI::Internal::Type::Union

# Type constructor for homogeneous arrays of a given element type.
ArrayOf = OpenAI::Internal::Type::ArrayOf

# Type constructor for hashes with values of a given type.
HashOf = OpenAI::Internal::Type::HashOf

# Base class for request/response model objects.
BaseModel = OpenAI::Internal::Type::BaseModel

# Mixin for models that carry request parameters.
RequestParameters = OpenAI::Internal::Type::RequestParameters
end
2 changes: 1 addition & 1 deletion lib/openai/client.rb
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module OpenAI
class Client < OpenAI::Transport::BaseClient
class Client < OpenAI::Internal::Transport::BaseClient
# Default max number of retries to attempt after a failed retryable request.
DEFAULT_MAX_RETRIES = 2

Expand Down
92 changes: 0 additions & 92 deletions lib/openai/cursor_page.rb

This file was deleted.

8 changes: 4 additions & 4 deletions lib/openai/errors.rb
Original file line number Diff line number Diff line change
Expand Up @@ -178,10 +178,10 @@ def self.for(url:, status:, body:, request:, response:, message: nil)
# @param response [nil]
# @param message [String, nil]
def initialize(url:, status:, body:, request:, response:, message: nil)
message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} }
@code = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :code))
@param = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :param))
@type = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :type))
message ||= OpenAI::Internal::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} }
@code = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :code))
@param = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :param))
@type = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :type))
super(
url: url,
status: status,
Expand Down
94 changes: 94 additions & 0 deletions lib/openai/internal/cursor_page.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
# frozen_string_literal: true

module OpenAI
  module Internal
    # A single page of results from a cursor-paginated list endpoint.
    # Fetching the next page re-issues the original request with an
    # `after` cursor taken from the last item's id.
    #
    # @example
    #   if cursor_page.next_page?
    #     cursor_page = cursor_page.next_page
    #   end
    #
    # @example
    #   cursor_page.auto_paging_each do |completion|
    #     puts(completion)
    #   end
    #
    # @example
    #   completions =
    #     cursor_page
    #       .to_enum
    #       .lazy
    #       .select { _1.object_id.even? }
    #       .map(&:itself)
    #       .take(2)
    #       .to_a
    #
    #   completions => Array
    class CursorPage
      include OpenAI::Internal::Type::BasePage

      # Items on this page, coerced into the request's model type.
      # @return [Array<Object>, nil]
      attr_accessor :data

      # Raw `has_more` flag reported by the server; may be nil when absent.
      # @return [Boolean, nil]
      attr_accessor :has_more

      # @api private
      #
      # @param client [OpenAI::Internal::Transport::BaseClient]
      # @param req [Hash{Symbol=>Object}]
      # @param headers [Hash{String=>String}, Net::HTTPHeader]
      # @param page_data [Hash{Symbol=>Object}]
      def initialize(client:, req:, headers:, page_data:)
        super
        model = req.fetch(:model)

        # Tolerate a missing or malformed `data` key: only coerce when the
        # payload matches the expected shape, otherwise leave @data nil.
        case page_data
        in {data: Array | nil => data}
          @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) }
        else
        end

        case page_data
        in {has_more: true | false | nil => has_more}
          @has_more = has_more
        else
        end
      end

      # Whether another page can be fetched.
      #
      # @return [Boolean]
      def next_page?
        # Coerce to a strict boolean so this predicate never leaks nil
        # (the raw attribute may be nil when the server omitted the flag).
        !!has_more
      end

      # Fetches the next page using the last item's id as the cursor.
      #
      # @raise [RuntimeError] when no further page exists; check #next_page? first.
      # @return [OpenAI::Internal::CursorPage]
      def next_page
        unless next_page?
          message = "No more pages available. Please check #next_page? before calling ##{__method__}"
          raise RuntimeError, message
        end

        req = OpenAI::Internal::Util.deep_merge(@req, {query: {after: data&.last&.id}})
        @client.request(req)
      end

      # Yields each item across all pages, fetching subsequent pages lazily.
      #
      # @param blk [Proc]
      # @raise [ArgumentError] when no block is given
      def auto_paging_each(&blk)
        raise ArgumentError, "A block must be given to ##{__method__}" unless block_given?

        page = self
        loop do
          page.data&.each { blk.call(_1) }
          break unless page.next_page?
          page = page.next_page
        end
      end

      # @return [String]
      def inspect
        "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} has_more=#{has_more.inspect}>"
      end
    end
  end
end
88 changes: 88 additions & 0 deletions lib/openai/internal/page.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# frozen_string_literal: true

module OpenAI
  module Internal
    # A single-page ("list all") response wrapper. This page type never has
    # a next page; the paging API exists only for interface parity with
    # cursor-paginated endpoints.
    #
    # @example
    #   if page.next_page?
    #     page = page.next_page
    #   end
    #
    # @example
    #   page.auto_paging_each do |model|
    #     puts(model)
    #   end
    #
    # @example
    #   models =
    #     page
    #       .to_enum
    #       .lazy
    #       .select { _1.object_id.even? }
    #       .map(&:itself)
    #       .take(2)
    #       .to_a
    #
    #   models => Array
    class Page
      include OpenAI::Internal::Type::BasePage

      # Items on this page, coerced into the request's model type.
      # @return [Array<Object>, nil]
      attr_accessor :data

      # The `object` discriminator string from the response payload.
      # @return [String]
      attr_accessor :object

      # @api private
      #
      # @param client [OpenAI::Internal::Transport::BaseClient]
      # @param req [Hash{Symbol=>Object}]
      # @param headers [Hash{String=>String}, Net::HTTPHeader]
      # @param page_data [Hash{Symbol=>Object}]
      def initialize(client:, req:, headers:, page_data:)
        super
        model = req.fetch(:model)

        # Tolerate a missing or malformed `data` key: only coerce when the
        # payload matches the expected shape, otherwise leave @data nil.
        case page_data
        in {data: Array | nil => data}
          @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) }
        else
        end

        case page_data
        in {object: String => object}
          @object = object
        else
        end
      end

      # Always false: this endpoint returns the full list in one response.
      #
      # @return [Boolean]
      def next_page?
        false
      end

      # @raise [RuntimeError] always; there is never a next page.
      # @return [OpenAI::Internal::Page]
      def next_page
        # Must actually raise — the original built the exception object but
        # never raised it, silently returning a RuntimeError instance.
        raise RuntimeError, "No more pages available."
      end

      # Yields each item on the page. Loops via the shared paging protocol,
      # but #next_page? is always false, so exactly one page is visited.
      #
      # @param blk [Proc]
      # @raise [ArgumentError] when no block is given
      def auto_paging_each(&blk)
        raise ArgumentError, "A block must be given to ##{__method__}" unless block_given?

        page = self
        loop do
          page.data&.each { blk.call(_1) }
          break unless page.next_page?
          page = page.next_page
        end
      end

      # @return [String]
      def inspect
        "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} object=#{object.inspect}>"
      end
    end
  end
end
Loading