diff --git a/README.md b/README.md
index 0854812..dc90f3c 100644
--- a/README.md
+++ b/README.md
@@ -28,13 +28,13 @@ class LanguageModel
   end
 
   def self.all
-    gpt_4_turbo = LanguageModel.new(
-      name: "GPT-4 Turbo",
+    gpt_4o = LanguageModel.new(
+      name: "GPT-4o",
       kind: "completion",
       provider: "openai",
       configuration: {
         "api_key" => "your_key",
-        "model" => "gpt-4-turbo"
+        "model" => "gpt-4o"
       }
     )
 
@@ -48,7 +48,7 @@ class LanguageModel
       }
     )
 
-    [gpt_4_turbo, ada2]
+    [gpt_4o, ada2]
   end
 end
diff --git a/lib/openai_api/completion.rb b/lib/openai_api/completion.rb
index fe07014..683235f 100644
--- a/lib/openai_api/completion.rb
+++ b/lib/openai_api/completion.rb
@@ -11,12 +11,14 @@ class Completion # rubocop:disable Metrics/ClassLength
   def initialize(model, stream: nil, raw: false)
     @name = model.name
     @api_key = model.configuration["api_key"]
+    @model_identifier = model.configuration["model"]
     @api_url = "https://api.openai.com/v1/chat/completions"
     @stream = stream
     @raw = raw
   end
 
   def chat(parameters)
+    parameters[:model] = @model_identifier
     # Rails.logger.info("Chatting with \"#{@name}\" model with URL: #{@api_url}.")
     if @stream.nil?
       single_request_chat(parameters)
diff --git a/lib/openai_api/embedding.rb b/lib/openai_api/embedding.rb
index 99715ca..121c3b0 100644
--- a/lib/openai_api/embedding.rb
+++ b/lib/openai_api/embedding.rb
@@ -8,10 +8,12 @@ class Embedding
   def initialize(model)
     @name = model.name
     @api_key = model.configuration["api_key"]
+    @model_identifier = model.configuration["model"]
     @api_url = "https://api.openai.com/v1/embeddings"
   end
 
   def embed(parameters)
+    parameters[:model] = @model_identifier
     # Rails.logger.info("Embedding with \"#{@name}\" model with URL: #{@api_url}.")
     response = connection.post do |request|
       request.params = params
diff --git a/test/test_openai_api.rb b/test/test_openai_api.rb
index 88020df..1d75107 100644
--- a/test/test_openai_api.rb
+++ b/test/test_openai_api.rb
@@ -31,17 +31,17 @@ def self.all # rubocop:disable Metrics/MethodLength
       }
     )
 
-    gpt4turbo = LanguageModel.new(
-      name: "GPT-4-Turbo",
+    gpt4o = LanguageModel.new(
+      name: "GPT-4o",
       kind: "completion",
       provider: "openai",
       configuration: {
         "api_key" => ENV.fetch("OPENAI_API_KEY"),
-        "model" => "gpt-4-turbo"
+        "model" => "gpt-4o"
       }
     )
 
-    [ada2, gpt4turbo]
+    [ada2, gpt4o]
   end
 end
@@ -92,8 +92,7 @@ def test_chat # rubocop:disable Metrics/MethodLength
           "role" => "system",
           "content" => "Tell me a joke"
         }
-      ],
-      "model": "gpt-4o"
+      ]
     }
 
     VCR.use_cassette("test_chat") do
@@ -117,8 +116,7 @@ def test_chat_content_filter # rubocop:disable Metrics/MethodLength
           "role" => "user",
          "content" => "FUCK SHIT PISS."
         }
-      ],
-      "model": "gpt-4o"
+      ]
     }
 
     VCR.use_cassette("test_chat_content_filter") do
@@ -133,8 +131,7 @@ def test_embedding
     first_embedding_model = LanguageModel.all.find { |model| model.kind == "embedding" }
     client = OpenAIAPI::Embedding.new(first_embedding_model)
     parameters = {
-      "input" => "Once upon a time",
-      "model": "text-embedding-ada-002",
+      "input" => "Once upon a time"
     }
 
     VCR.use_cassette("test_embedding") do