Get user associated to llm_model
romanrizzi committed Jun 13, 2024
1 parent 05858ad · commit 8b44b77
Showing 38 changed files with 566 additions and 236 deletions.
1 change: 1 addition & 0 deletions app/controllers/discourse_ai/admin/ai_llms_controller.rb
@@ -106,6 +106,7 @@ def ai_llm_params
:max_prompt_tokens,
:url,
:api_key,
:bot_username,
)
end
end
2 changes: 2 additions & 0 deletions app/models/llm_model.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true

class LlmModel < ActiveRecord::Base
belongs_to :user

def tokenizer_class
tokenizer.constantize
end
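
With the new belongs_to :user, each LlmModel row is tied to its companion bot account. A minimal console sketch of how the association and the new bot_username column fit together (the record looked up by name is illustrative only, not part of this commit):

  llm = LlmModel.find_by(name: "gpt-4o")  # hypothetical record seeded by the migrations below
  llm.bot_username                        # => "gpt4o_bot"
  llm.user                                # companion User record, or nil when user_id was left NULL
  llm.user&.username                      # expected to match bot_username once the user exists
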
3 changes: 2 additions & 1 deletion assets/javascripts/discourse/admin/models/ai-llm.js
@@ -11,7 +11,8 @@ export default class AiLlm extends RestModel {
"tokenizer",
"max_prompt_tokens",
"url",
"api_key"
"api_key",
"bot_username"
);
}

12 changes: 12 additions & 0 deletions assets/javascripts/discourse/components/ai-llm-editor.gjs
@@ -181,6 +181,18 @@ export default class AiLlmEditor extends Component {
@content={{I18n.t "discourse_ai.llms.hints.max_prompt_tokens"}}
/>
</div>
<div class="control-group">
<label>{{i18n "discourse_ai.llms.companion_bot_username"}}</label>
<Input
@type="text"
class="ai-llm-editor-input ai-llm-editor__companion-bot-user"
@value={{@model.bot_username}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.llms.hints.companion_bot_username"}}
/>
</div>

<div class="control-group ai-llm-editor__action_panel">
<DButton
2 changes: 2 additions & 0 deletions config/locales/client.en.yml
@@ -210,6 +210,7 @@ en:
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
companion_bot_username: "Companion user's username"
save: "Save"
edit: "Edit"
saved: "LLM Model Saved"
@@ -226,6 +227,7 @@
hints:
max_prompt_tokens: "Max number of tokens for the prompt. As a rule of thumb, this should be 50% of the model's context window."
name: "We include this in the API call to specify which model we'll use."
companion_bot_username: "Features like the AI Bot create a user with this username for users to interact with."

providers:
aws_bedrock: "AWS Bedrock"
3 changes: 3 additions & 0 deletions config/locales/server.en.yml
@@ -318,6 +318,9 @@
disable_module_first: "You have to disable %{setting} first."
set_llm_first: "Set %{setting} first."
model_unreachable: "We couldn't get a response from this model. Check your settings first."
configure_llm:
one: "We couldn't find an LLM with the name %{models}. Go to the plugin's LLMs section to set it up."
other: "We couldn't find configured LLMs with these names: %{models}. Go to the plugin's LLMs section to set them up."
endpoints:
not_configured: "%{display_name} (not configured)"
configuration_hint:
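
The new configure_llm message above is pluralized, so Rails I18n picks the one/other variant from a count argument. A hedged sketch of how it could be rendered (the full key path and the list of missing models are assumptions for illustration):

  missing = ["gpt-4o", "claude-3-opus"]   # hypothetical unconfigured model names
  I18n.t(
    "discourse_ai.llm.configure_llm",     # assumed key path; only the leaf key is visible in this diff
    count: missing.size,                  # selects the `one` or `other` variant
    models: missing.join(", "),
  )
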
8 changes: 4 additions & 4 deletions config/settings.yml
@@ -342,19 +342,19 @@ discourse_ai:
type: group_list
list_type: compact
default: "3|14" # 3: @staff, 14: @trust_level_4
# Adding a new bot? Make sure to create a user for it in the seed file and update translations.
ai_bot_public_sharing_allowed_groups:
client: false
type: group_list
list_type: compact
default: "1|2" # 1: admins, 2: moderators
allow_any: false
refresh: true
- ai_bot_enabled_chat_bots:
+ ai_bot_enabled_chat_bots: # TODO(roman): Make this dynamic
type: list
- default: "gpt-3.5-turbo"
+ default: ""
client: true
- choices: "DiscourseAi::Configuration::LlmEnumerator.ai_bot_models"
+ validator: "DiscourseAi::Configuration::LlmModelValidator"
+ choices: "DiscourseAi::Configuration::LlmEnumerator.available_ai_bots"
ai_bot_add_to_header:
default: true
client: true
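
The setting now sources its choices from LlmEnumerator.available_ai_bots and validates them with LlmModelValidator, neither of which appears in this diff. A minimal sketch of what such an enumerator could look like if it simply reflects seeded llm_models rows (purely an assumption, not the plugin's actual implementation):

  module DiscourseAi
    module Configuration
      class LlmEnumerator
        # Hypothetical: expose the companion bot usernames of configured models
        def self.available_ai_bots
          LlmModel.where.not(bot_username: [nil, ""]).pluck(:bot_username)
        end
      end
    end
  end
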
8 changes: 8 additions & 0 deletions db/migrate/20240528132059_add_companion_user_to_llm_model.rb
@@ -0,0 +1,8 @@
# frozen_string_literal: true

class AddCompanionUserToLlmModel < ActiveRecord::Migration[7.0]
def change
add_column :llm_models, :bot_username, :string
add_column :llm_models, :user_id, :integer
end
end

This file was deleted.

100 changes: 100 additions & 0 deletions db/post_migrate/20240528144216_seed_open_ai_models.rb
@@ -0,0 +1,100 @@
# frozen_string_literal: true

class SeedOpenAiModels < ActiveRecord::Migration[7.0]
def up
models = []

open_ai_api_key = fetch_setting("ai_openai_api_key")

if open_ai_api_key.present?
models << mirror_open_ai(
"GPT-3.5 Turbo",
"gpt-3.5-turbo",
8192,
"ai_openai_gpt35_url",
open_ai_api_key,
"gpt3.5_bot",
-111,
)
models << mirror_open_ai(
"GPT-3.5 Turbo 16K",
"gpt-3.5-turbo-16k",
16_384,
"ai_openai_gpt35_16k_url",
open_ai_api_key,
"gpt3.5_bot",
-111,
)
models << mirror_open_ai(
"GPT-4",
"gpt-4",
8192,
"ai_openai_gpt4_url",
open_ai_api_key,
"gpt4_bot",
-110,
)
models << mirror_open_ai(
"GPT-4 32K",
"gpt-4-32k",
32_768,
"ai_openai_gpt4_32k_url",
open_ai_api_key,
"gpt4_bot",
-110,
)
models << mirror_open_ai(
"GPT-4 Turbo",
"gpt-4-turbo",
131_072,
"ai_openai_gpt4_turbo_url",
open_ai_api_key,
"gpt4t_bot",
-113,
)
models << mirror_open_ai(
"GPT-4o",
"gpt-4o",
131_072,
"ai_openai_gpt4o_url",
open_ai_api_key,
"gpt4o_bot",
-121,
)
end

if models.present?
rows = models.compact.join(",")

DB.exec(<<~SQL) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, created_at, updated_at)
VALUES #{rows};
SQL
end
end

def has_companion_user?(user_id)
DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
end

def fetch_setting(name)
DB.query_single(
"SELECT value FROM site_settings WHERE name = :setting_name",
setting_name: name,
).first
end

def mirror_open_ai(display_name, name, max_prompt_tokens, setting_name, key, bot_username, bot_id)
url = fetch_setting(setting_name)

user_id = has_companion_user?(bot_id) ? bot_id : "NULL"

if url
"('#{display_name}', '#{name}', 'open_ai', 'OpenAiTokenizer', #{max_prompt_tokens}, '#{url}', '#{key}', '#{bot_username}', #{user_id}, NOW(), NOW())"
end
end

def down
raise ActiveRecord::IrreversibleMigration
end
end
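
Once this migration runs on a site with ai_openai_api_key set, the seeded rows can be checked directly; user_id stays NULL for bots whose companion user id was not found. A small verification sketch (the console query is illustrative; column names come from the migration above):

  LlmModel.where(provider: "open_ai").pluck(:name, :bot_username, :user_id)
  # => e.g. [["gpt-4o", "gpt4o_bot", -121], ["gpt-4", "gpt4_bot", -110]]
  #    only models whose URL site setting was present get inserted
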
123 changes: 123 additions & 0 deletions db/post_migrate/20240531205234_seed_claude_models.rb
@@ -0,0 +1,123 @@
# frozen_string_literal: true

class SeedClaudeModels < ActiveRecord::Migration[7.0]
def up
claude_models = %w[claude-instant-1 claude-2 claude-3-haiku claude-3-sonnet claude-3-opus]

models = []

bedrock_secret_access_key = fetch_setting("ai_bedrock_secret_access_key")

if bedrock_secret_access_key.present?
bedrock_region = fetch_setting("ai_bedrock_region")

claude_models.each do |cm|
url =
"https://bedrock-runtime.#{bedrock_region}.amazonaws.com/model/#{mapped_bedrock_model(cm)}/invoke"

bot_id = claude_bot_id(cm)
user_id = has_companion_user?(bot_id) ? bot_id : "NULL"

models << "('#{display_name(cm)}', '#{cm}', 'aws_bedrock', 'AnthropicTokenizer', 200000, '#{url}', '#{bedrock_secret_access_key}', '#{claude_bot_username(cm)}', #{user_id}, NOW(), NOW())"
end
end

anthropic_ai_api_key = fetch_setting("ai_anthropic_api_key")
if anthropic_ai_api_key.present?
claude_models.each do |cm|
url = "https://api.anthropic.com/v1/messages"

bot_id = claude_bot_id(cm)
user_id = has_companion_user?(bot_id) ? bot_id : "NULL"

models << "('#{display_name(cm)}', '#{cm}', 'anthropic', 'AnthropicTokenizer', 200000, '#{url}', '#{anthropic_ai_api_key}', '#{claude_bot_username(cm)}', #{user_id}, NOW(), NOW())"
end
end

if models.present?
rows = models.compact.join(",")

DB.exec(<<~SQL) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, created_at, updated_at)
VALUES #{rows};
SQL
end
end

def has_companion_user?(user_id)
DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
end

def fetch_setting(name)
DB.query_single(
"SELECT value FROM site_settings WHERE name = :setting_name",
setting_name: name,
).first
end

def claude_bot_username(model)
case model
when "claude-2"
"claude_bot"
when "claude-3-haiku"
"claude_3_haiku_bot"
when "claude-3-sonnet"
"claude_3_sonnet_bot"
when "claude-instant-1"
"claude_instant_bot"
when "claude-3-opus"
"claude_3_opus_bot"
else
"claude_instant_bot"
end
end

def claude_bot_id(model)
case model
when "claude-2"
-112
when "claude-3-haiku"
-119
when "claude-3-sonnet"
-118
when "claude-instant-1"
nil
when "claude-3-opus"
-117
end
end

def mapped_bedrock_model(model)
case model
when "claude-2"
"anthropic.claude-v2:1"
when "claude-3-haiku"
"anthropic.claude-3-haiku-20240307-v1:0"
when "claude-3-sonnet"
"anthropic.claude-3-sonnet-20240229-v1:0"
when "claude-instant-1"
"anthropic.claude-instant-v1"
when "claude-3-opus"
"anthropic.claude-3-opus-20240229-v1:0"
end
end

def display_name(model)
case model
when "claude-2"
"Claude 2"
when "claude-3-haiku"
"Claude 3 Haiku"
when "claude-3-sonnet"
"Claude 3 Sonnet"
when "claude-instant-1"
"Claude Instant 1"
when "claude-3-opus"
"Claude 3 Opus"
end
end

def down
raise ActiveRecord::IrreversibleMigration
end
end
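
For the Bedrock branch, the endpoint is built from the region setting plus the mapped model id. A worked example of what the helpers above produce (the region value is an assumed example):

  # with ai_bedrock_region = "us-east-1"
  mapped_bedrock_model("claude-3-opus")
  # => "anthropic.claude-3-opus-20240229-v1:0"
  # URL stored on the seeded row:
  #   https://bedrock-runtime.us-east-1.amazonaws.com/model/anthropic.claude-3-opus-20240229-v1:0/invoke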