Skip to content

Commit

Permalink
Don't store bot username. Minor touches to migrate default values in …
Browse files Browse the repository at this point in the history
…settings
  • Loading branch information
romanrizzi committed Jun 18, 2024
1 parent e45e2cb commit 43acecf
Show file tree
Hide file tree
Showing 14 changed files with 49 additions and 89 deletions.
2 changes: 1 addition & 1 deletion app/controllers/discourse_ai/admin/ai_llms_controller.rb
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ def update
llm_model = LlmModel.find(params[:id])

if llm_model.update(ai_llm_params)
llm_model.toggle_companion_user
render json: llm_model
else
render_json_error llm_model
Expand Down Expand Up @@ -106,7 +107,6 @@ def ai_llm_params
:max_prompt_tokens,
:url,
:api_key,
:bot_username,
:enabled_chat_bot,
)
end
Expand Down
9 changes: 4 additions & 5 deletions app/models/llm_model.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ class LlmModel < ActiveRecord::Base
belongs_to :user

def toggle_companion_user
return if bot_username == "fake" && Rails.env.production?
return if name == "fake" && Rails.env.production?

enable_check = SiteSetting.ai_bot_enabled && enabled_chat_bot

Expand All @@ -19,9 +19,9 @@ def toggle_companion_user
new_user =
User.new(
id: [FIRST_BOT_USER_ID, next_id].min,
email: "no_email_#{bot_username}",
name: bot_username.titleize,
username: UserNameSuggester.suggest(bot_username),
email: "no_email_#{name.underscore}",
name: name.titleize,
username: UserNameSuggester.suggest(name),
active: true,
approved: true,
admin: true,
Expand Down Expand Up @@ -65,7 +65,6 @@ def tokenizer_class
# updated_at :datetime not null
# url :string
# api_key :string
# bot_username :string
# user_id :integer
# enabled_chat_bot :boolean default(FALSE), not null
#
10 changes: 9 additions & 1 deletion app/serializers/llm_model_serializer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,13 @@
# Serializes an LlmModel for the admin API, rooted under the "llm" key.
# NOTE(review): this span comes from a diff rendering — the first (one-line)
# `attributes` call is the removed version and the multi-line call below is
# its replacement; only the multi-line form (which adds :enabled_chat_bot
# and drops nothing) exists in the committed file. `attributes` is
# cumulative in ActiveModel serializers, so the text is still valid Ruby.
class LlmModelSerializer < ApplicationSerializer
root "llm"

attributes :id, :display_name, :name, :provider, :max_prompt_tokens, :tokenizer, :api_key, :url
attributes :id,
:display_name,
:name,
:provider,
:max_prompt_tokens,
:tokenizer,
:api_key,
:url,
:enabled_chat_bot
end
1 change: 0 additions & 1 deletion assets/javascripts/discourse/admin/models/ai-llm.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ export default class AiLlm extends RestModel {
"max_prompt_tokens",
"url",
"api_key",
"bot_username",
"enabled_chat_bot"
);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import Component from "@glimmer/component";
import { action } from "@ember/object";
import { service } from "@ember/service";
import { gt } from "truth-helpers";
import DButton from "discourse/components/d-button";
import i18n from "discourse-common/helpers/i18n";
import { composeAiBotMessage } from "../lib/ai-bot-helper";

export default class AiBotHeaderIcon extends Component {
@service currentUser;
@service siteSettings;
@service composer;

get bots() {
Expand All @@ -18,13 +18,17 @@ export default class AiBotHeaderIcon extends Component {
return availableBots ? availableBots.map((bot) => bot.model_name) : [];
}

get showHeaderButton() {
return this.bots.length > 0 && this.siteSettings.ai_bot_add_to_header;
}

@action
compose() {
composeAiBotMessage(this.bots[0], this.composer);
}

<template>
{{#if (gt this.bots.length 0)}}
{{#if this.showHeaderButton}}
<li>
<DButton
@action={{this.compose}}
Expand Down
13 changes: 0 additions & 13 deletions assets/javascripts/discourse/components/ai-llm-editor.gjs
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,6 @@ export default class AiLlmEditor extends Component {
popupAjaxError(e);
}
}
await this.toggleField("enabled_chat_bot", true);
}

<template>
Expand Down Expand Up @@ -198,18 +197,6 @@ export default class AiLlmEditor extends Component {
@content={{I18n.t "discourse_ai.llms.hints.max_prompt_tokens"}}
/>
</div>
<div class="control-group">
<label>{{i18n "discourse_ai.llms.companion_bot_username"}}</label>
<Input
@type="text"
class="ai-llm-editor-input ai-llm-editor__companion-bot-user"
@value={{@model.bot_username}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.llms.hints.companion_bot_username"}}
/>
</div>
<div class="control-group">
<DToggleSwitch
class="ai-llm-editor__enabled-chat-bot"
Expand Down
2 changes: 0 additions & 2 deletions config/locales/client.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,6 @@ en:
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
companion_bot_username: "Companion user's username"
enabled_chat_bot: "Allow Companion user to act as an AI Bot"
save: "Save"
edit: "Edit"
Expand All @@ -228,7 +227,6 @@ en:
hints:
max_prompt_tokens: "Max numbers of tokens for the prompt. As a rule of thumb, this should be 50% of the model's context window."
name: "We include this in the API call to specify which model we'll use."
companion_bot_username: "Some features, like the AI Bot, set up a companion user account other users can interact with. Set this companion user's username here."

providers:
aws_bedrock: "AWS Bedrock"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

class AddCompanionUserToLlmModel < ActiveRecord::Migration[7.0]
def change
add_column :llm_models, :bot_username, :string
add_column :llm_models, :user_id, :integer
add_column :llm_models, :enabled_chat_bot, :boolean, null: false, default: false
end
Expand Down
33 changes: 12 additions & 21 deletions db/post_migrate/20240528144216_seed_open_ai_models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,24 @@ def up

open_ai_api_key = fetch_setting("ai_openai_api_key")
enabled_models = fetch_setting("ai_bot_enabled_chat_bots").to_a.split("|")
enabled_models = ["gpt-3.5-turbo"] if enabled_models.empty?

if open_ai_api_key.present?
models << mirror_open_ai(
"GPT-3.5 Turbo",
"GPT-3.5-Turbo",
"gpt-3.5-turbo",
8192,
"ai_openai_gpt35_url",
open_ai_api_key,
"gpt3.5_bot",
-111,
enabled_models,
)
models << mirror_open_ai(
"GPT-3.5 Turbo 16K",
"GPT-3.5-Turbo-16K",
"gpt-3.5-turbo-16k",
16_384,
"ai_openai_gpt35_16k_url",
open_ai_api_key,
"gpt3.5_bot",
-111,
enabled_models,
)
Expand All @@ -34,27 +33,24 @@ def up
8192,
"ai_openai_gpt4_url",
open_ai_api_key,
"gpt4_bot",
-110,
enabled_models,
)
models << mirror_open_ai(
"GPT-4 32K",
"GPT-4-32K",
"gpt-4-32k",
32_768,
"ai_openai_gpt4_32k_url",
open_ai_api_key,
"gpt4_bot",
-110,
enabled_models,
)
models << mirror_open_ai(
"GPT-4 Turbo",
"GPT-4-Turbo",
"gpt-4-turbo",
131_072,
"ai_openai_gpt4_turbo_url",
open_ai_api_key,
"gpt4t_bot",
-113,
enabled_models,
)
Expand All @@ -64,18 +60,17 @@ def up
131_072,
"ai_openai_gpt4o_url",
open_ai_api_key,
"gpt4o_bot",
-121,
enabled_models,
)
end

if models.present?
rows = models.compact.join(",")
rows = models.compact.join(", ")

DB.exec(<<~SQL, rows: rows) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, enabled_chat_bot, created_at, updated_at)
VALUES :rows;
DB.exec(<<~SQL) if rows.present?
INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
VALUES #{rows};
SQL
end
end
Expand All @@ -97,18 +92,14 @@ def mirror_open_ai(
max_prompt_tokens,
setting_name,
key,
bot_username,
bot_id,
enabled_models
)
url = fetch_setting(setting_name)

url = fetch_setting(setting_name) || "https://api.openai.com/v1/chat/completions"
user_id = has_companion_user?(bot_id) ? bot_id : "NULL"
enabled = enabled_models.include?(name)

if url
enabled = enabled_models.include?(name)
"(#{name.titleize}, #{name}, open_ai, OpenAiTokenizer, #{max_prompt_tokens}, #{url}, #{key}, #{bot_username}, #{user_id}, #{enabled}, NOW(), NOW())"
end
"('#{display_name}', '#{name}', 'open_ai', 'DiscourseAi::Tokenizer::OpenAiTokenizer', #{max_prompt_tokens}, '#{url}', '#{key}', #{user_id}, #{enabled}, NOW(), NOW())"
end

def down
Expand Down
27 changes: 5 additions & 22 deletions db/post_migrate/20240531205234_seed_claude_models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def up
user_id = has_companion_user?(bot_id) ? bot_id : "NULL"

enabled = enabled_models.include?(cm)
models << "(#{display_name(cm)}, #{cm}, aws_bedrock, AnthropicTokenizer, 200000, #{url}, #{bedrock_secret_access_key}, #{claude_bot_username(cm)}, #{user_id}, #{enabled}, NOW(), NOW())"
models << "('#{display_name(cm)}', '#{cm}', 'aws_bedrock', 'DiscourseAi::Tokenizer::AnthropicTokenizer', 200000, '#{url}', '#{bedrock_secret_access_key}', #{user_id}, #{enabled}, NOW(), NOW())"
end
end

Expand All @@ -33,16 +33,16 @@ def up
user_id = has_companion_user?(bot_id) ? bot_id : "NULL"

enabled = enabled_models.include?(cm)
models << "(#{display_name(cm)}, #{cm}, anthropic, AnthropicTokenizer, 200000, #{url}, #{anthropic_ai_api_key}, #{claude_bot_username(cm)}, #{user_id}, #{enabled}, NOW(), NOW())"
models << "('#{display_name(cm)}', '#{cm}', 'anthropic', 'DiscourseAi::Tokenizer::AnthropicTokenizer', 200000, '#{url}', '#{anthropic_ai_api_key}', #{user_id}, #{enabled}, NOW(), NOW())"
end
end

if models.present?
rows = models.compact.join(",")
rows = models.compact.join(", ")

DB.exec(<<~SQL, rows: rows) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, enabled_chat_bot, created_at, updated_at)
VALUES :rows;
INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
VALUES #{rows};
SQL
end
end
Expand All @@ -58,23 +58,6 @@ def fetch_setting(name)
).first
end

# Maps a Claude model identifier to the username used for its companion bot
# account. Unknown models fall back to the generic "claude_instant_bot".
# NOTE: "claude-instant-1" intentionally maps to the Bedrock-style id
# "anthropic.claude-instant-v1" — preserved exactly as in the original.
def claude_bot_username(model)
  bot_usernames = {
    "claude-2" => "claude_bot",
    "claude-3-haiku" => "claude_3_haiku_bot",
    "claude-3-sonnet" => "claude_3_sonnet_bot",
    "claude-instant-1" => "anthropic.claude-instant-v1",
    "claude-3-opus" => "claude_3_opus_bot",
  }

  bot_usernames.fetch(model, "claude_instant_bot")
end

def claude_bot_id(model)
case model
when "claude-2"
Expand Down
18 changes: 6 additions & 12 deletions db/post_migrate/20240603133432_seed_other_propietary_models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,11 @@ def up
gemini_models.each do |gm|
url = "https://generativelanguage.googleapis.com/v1beta/models/#{gemini_mapped_model(gm)}"

bot_username = "#{gm.underscore}_bot"
bot_username = "gemini_bot" if cm == "gemini-1.5-pro"

bot_user_id = "NULL"
bot_user_id = -115 if cm == "gemini-1.5-pro" && has_companion_user?(-115)
bot_user_id = -115 if gm == "gemini-1.5-pro" && has_companion_user?(-115)

enabled = enabled_models.include?(gm)
models << "(#{gm.titleize}, #{gm}, google, OpenAiTokenizer, #{gemini_tokens(gm)}, #{url}, #{gemini_key}, #{bot_username}, #{bot_user_id}, #{enabled}, NOW(), NOW())"
models << "('#{gm.titleize}', '#{gm}', 'google', 'DiscourseAi::Tokenizer::OpenAiTokenizer', '#{gemini_tokens(gm)}', '#{url}', '#{gemini_key}', #{bot_user_id}, #{enabled}, NOW(), NOW())"
end
end

Expand All @@ -30,23 +27,20 @@ def up
cohere_models = %w[command-light command command-r command-r-plus]

cohere_models.each do |cm|
bot_username = "#{cm.underscore}_bot"
bot_username = "cohere_command_bot" if cm == "command-r-plus"

bot_user_id = "NULL"
bot_user_id = -120 if cm == "command-r-plus" && has_companion_user?(-120)

enabled = enabled_models.include?(cm)
models << "(#{cm.titleize}, #{cm}, cohere, OpenAiTokenizer, #{cohere_tokens(cm)}, https://api.cohere.ai/v1/chat, #{cohere_key}, #{bot_username}, #{bot_user_id}, #{enabled}, NOW(), NOW())"
models << "('#{cm.titleize}', '#{cm}', 'cohere', 'DiscourseAi::Tokenizer::OpenAiTokenizer', #{cohere_tokens(cm)}, 'https://api.cohere.ai/v1/chat', '#{cohere_key}', #{bot_user_id}, #{enabled}, NOW(), NOW())"
end
end

if models.present?
rows = models.compact.join(",")
rows = models.compact.join(", ")

DB.exec(<<~SQL, rows: rows) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, enabled_chat_bot, created_at, updated_at)
VALUES :rows;
INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
VALUES #{rows};
SQL
end
end
Expand Down
12 changes: 6 additions & 6 deletions db/post_migrate/20240603143158_seed_oss_models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def up
name = hf_display_name || "mistralai/Mixtral"
token_limit = hf_token_limit || 32_000

models << "(#{name}, #{name}, hugging_face, MixtralTokenizer, #{token_limit}, #{hf_url}, #{hf_key}, mixtral_bot, #{user_id}, NOW(), NOW())"
models << "('#{name}', '#{name}', 'hugging_face', 'DiscourseAi::Tokenizer::MixtralTokenizer', #{token_limit}, '#{hf_url}', '#{hf_key}', #{user_id}, NOW(), NOW())"
end

vllm_key = fetch_setting("ai_vllm_api_key")
Expand All @@ -26,7 +26,7 @@ def up
url = "#{vllm_url}/v1/chat/completions"
name = "mistralai/Mixtral"

models << "(#{name}, #{name}, vllm, MixtralTokenizer, 32000, #{url}, #{vllm_key}, mixtral_bot, #{user_id}, NOW(), NOW())"
models << "('#{name}', '#{name}', 'vllm', 'DiscourseAi::Tokenizer::MixtralTokenizer', 32000, '#{url}', '#{vllm_key}', #{user_id}, NOW(), NOW())"
end

vllm_srv = fetch_setting("ai_vllm_endpoint_srv")
Expand All @@ -35,15 +35,15 @@ def up
url = "https://shadowed-by-srv.invalid"
name = "mistralai/Mixtral"

models << "(#{name}, #{name}, vllm, MixtralTokenizer, 32000, #{url}, #{vllm_key}, mixtral_bot, #{user_id}, NOW(), NOW())"
models << "('#{name}', '#{name}', 'vllm', 'DiscourseAi::Tokenizer::MixtralTokenizer', 32000, '#{url}', '#{vllm_key}', #{user_id}, NOW(), NOW())"
end

if models.present?
rows = models.compact.join(",")
rows = models.compact.join(", ")

DB.exec(<<~SQL, rows: rows) if rows.present?
INSERT INTO llm_models (display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, bot_username, user_id, created_at, updated_at)
VALUES :rows;
INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, created_at, updated_at)
VALUES #{rows};
SQL
end
end
Expand Down
Loading

0 comments on commit 43acecf

Please sign in to comment.