This repository was archived by the owner on Jul 22, 2025. It is now read-only.

FEATURE: basic support for GPT-o models #804

Merged · 1 commit · Sep 16, 2024
11 changes: 10 additions & 1 deletion lib/completions/dialects/chat_gpt.rb
@@ -39,14 +39,23 @@ def max_prompt_tokens
llm_model.max_prompt_tokens - buffer
end

# o1 models do not support streaming, tools, or system messages
def is_gpt_o?
llm_model.provider == "open_ai" && llm_model.name.include?("o1-")
end

private

def tools_dialect
@tools_dialect ||= DiscourseAi::Completions::Dialects::OpenAiTools.new(prompt.tools)
end

def system_msg(msg)
{ role: "system", content: msg[:content] }
if is_gpt_o?
{ role: "user", content: msg[:content] }
else
{ role: "system", content: msg[:content] }
end
end

def model_msg(msg)
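For illustration, here is a minimal standalone sketch of the role remapping introduced above. The helper name and the model-name check are illustrative only, mirroring is_gpt_o?, and are not part of the plugin's API:

def remap_system_message(msg, model_name)
  # o1 models reject the "system" role, so fall back to "user"
  if model_name.include?("o1-")
    { role: "user", content: msg[:content] }
  else
    { role: "system", content: msg[:content] }
  end
end

remap_system_message({ content: "You are a helpful bot" }, "o1-preview")
# => { role: "user", content: "You are a helpful bot" }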
11 changes: 11 additions & 0 deletions lib/completions/endpoints/open_ai.rb
@@ -27,6 +27,17 @@ def provider_id
AiApiAuditLog::Provider::OpenAI
end

def perform_completion!(dialect, user, model_params = {}, feature_name: nil, &blk)
if dialect.respond_to?(:is_gpt_o?) && dialect.is_gpt_o? && block_given?
# we need to disable streaming and simulate it
blk.call "", lambda { |*| }
response = super(dialect, user, model_params, feature_name: feature_name, &nil)
blk.call response, lambda { |*| }
else
super
end
end

private

def model_uri
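For illustration, a standalone sketch of the "simulate streaming" pattern used in perform_completion! above: the caller's block is invoked once with an empty partial, the request runs without streaming, and the block is then invoked once more with the entire response. The helper below is hypothetical and not the plugin's API:

def simulated_stream(full_response, &blk)
  cancel = lambda { |*| }        # no-op cancel handle, as in the diff
  blk.call "", cancel            # empty chunk up front, so callers see a "stream" start
  blk.call full_response, cancel # then the whole non-streamed response in one piece
end

simulated_stream("Hello from o1") { |partial, _cancel| print partial }
# prints: Hello from o1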