DEV: Rewire AI bot internals to use LlmModel #638

Merged
merged 6 commits into from Jun 18, 2024
Changes from 3 commits
2 changes: 2 additions & 0 deletions app/controllers/discourse_ai/admin/ai_llms_controller.rb
@@ -106,6 +106,8 @@ def ai_llm_params
:max_prompt_tokens,
:url,
:api_key,
:bot_username,
:enabled_chat_bot,
)
end
end
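Not shown in this hunk is how the controller acts on the two new permitted params. A rough sketch of how the update action might hand them to the model and then refresh the companion user — the action body and serializer name below are assumptions, not code from this PR:

# Hypothetical sketch only — not part of this diff. Assumes the update action
# re-runs the companion-user toggle after the new params are saved.
def update
  llm_model = LlmModel.find(params[:id])

  if llm_model.update(ai_llm_params)
    llm_model.toggle_companion_user # defined on LlmModel later in this PR
    render json: LlmModelSerializer.new(llm_model), status: 200
  else
    render_json_error llm_model
  end
end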
8 changes: 3 additions & 5 deletions app/controllers/discourse_ai/ai_bot/bot_controller.rb
@@ -31,12 +31,10 @@ def stop_streaming_response
end

def show_bot_username
bot_user_id = DiscourseAi::AiBot::EntryPoint.map_bot_model_to_user_id(params[:username])
raise Discourse::InvalidParameters.new(:username) if !bot_user_id
bot_user = DiscourseAi::AiBot::EntryPoint.find_user_from_model(params[:username])
raise Discourse::InvalidParameters.new(:username) if !bot_user

bot_username_lower = User.find(bot_user_id).username_lower

render json: { bot_username: bot_username_lower }, status: 200
render json: { bot_username: bot_user.username_lower }, status: 200
end
end
end
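The EntryPoint.find_user_from_model helper used above replaces the old id-mapping tables but is not included in this excerpt. A minimal sketch of the shape it plausibly has, assuming it resolves the requested name against bot-enabled LlmModel records and their companion users — the query details are assumptions:

# Hypothetical sketch of the helper referenced above — not code from this PR.
def self.find_user_from_model(model_name)
  LlmModel
    .where(enabled_chat_bot: true)
    .where("name = :name OR bot_username = :name", name: model_name)
    .includes(:user)
    .first
    &.user
end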
60 changes: 26 additions & 34 deletions app/models/ai_persona.rb
@@ -252,40 +252,32 @@ def ensure_not_system
#
# Table name: ai_personas
#
# id :bigint not null, primary key
# name :string(100) not null
# description :string(2000) not null
# tools :json not null
# system_prompt :string(10000000) not null
# allowed_group_ids :integer default([]), not null, is an Array
# created_by_id :integer
# enabled :boolean default(TRUE), not null
# created_at :datetime not null
# updated_at :datetime not null
# system :boolean default(FALSE), not null
# priority :boolean default(FALSE), not null
# temperature :float
# top_p :float
# user_id :integer
# mentionable :boolean default(FALSE), not null
# default_llm :text
# max_context_posts :integer
# max_post_context_tokens :integer
# max_context_tokens :integer
# vision_enabled :boolean default(FALSE), not null
# vision_max_pixels :integer default(1048576), not null
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# rag_conversation_chunks :integer default(10), not null
# role :enum default("bot"), not null
# role_category_ids :integer default([]), not null, is an Array
# role_tags :string default([]), not null, is an Array
# role_group_ids :integer default([]), not null, is an Array
# role_whispers :boolean default(FALSE), not null
# role_max_responses_per_hour :integer default(50), not null
# question_consolidator_llm :text
# allow_chat :boolean default(FALSE), not null
# tool_details :boolean default(TRUE), not null
# id :bigint not null, primary key
# name :string(100) not null
# description :string(2000) not null
# system_prompt :string(10000000) not null
# allowed_group_ids :integer default([]), not null, is an Array
# created_by_id :integer
# enabled :boolean default(TRUE), not null
# created_at :datetime not null
# updated_at :datetime not null
# system :boolean default(FALSE), not null
# priority :boolean default(FALSE), not null
# temperature :float
# top_p :float
# user_id :integer
# mentionable :boolean default(FALSE), not null
# default_llm :text
# max_context_posts :integer
# vision_enabled :boolean default(FALSE), not null
# vision_max_pixels :integer default(1048576), not null
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# rag_conversation_chunks :integer default(10), not null
# question_consolidator_llm :text
# allow_chat :boolean default(FALSE), not null
# tool_details :boolean default(TRUE), not null
# tools :json not null
#
# Indexes
#
4 changes: 2 additions & 2 deletions app/models/chat_message_custom_prompt.rb
@@ -6,7 +6,7 @@ class ChatMessageCustomPrompt < ActiveRecord::Base

# == Schema Information
#
# Table name: message_custom_prompts
# Table name: chat_message_custom_prompts
#
# id :bigint not null, primary key
# message_id :bigint not null
@@ -16,5 +16,5 @@ class ChatMessageCustomPrompt < ActiveRecord::Base
#
# Indexes
#
# index_message_custom_prompts_on_message_id (message_id) UNIQUE
# index_chat_message_custom_prompts_on_message_id (message_id) UNIQUE
#
48 changes: 48 additions & 0 deletions app/models/llm_model.rb
@@ -1,6 +1,51 @@
# frozen_string_literal: true

class LlmModel < ActiveRecord::Base
FIRST_BOT_USER_ID = -1200

belongs_to :user

def toggle_companion_user
return if bot_username == "fake" && Rails.env.production?

enable_check = SiteSetting.ai_bot_enabled && enabled_chat_bot

if enable_check
if !user
next_id = DB.query_single(<<~SQL).first
SELECT min(id) - 1 FROM users
SQL

new_user =
User.new(
id: [FIRST_BOT_USER_ID, next_id].min,
email: "no_email_#{bot_username}",
name: bot_username.titleize,
username: UserNameSuggester.suggest(bot_username),
active: true,
approved: true,
admin: true,
moderator: true,
trust_level: TrustLevel[4],
)
new_user.save!(validate: false)
self.update!(user: new_user)
else
user.update!(active: true)
end
elsif user
# will include deleted
has_posts = DB.query_single("SELECT 1 FROM posts WHERE user_id = #{user.id} LIMIT 1").present?

if has_posts
user.update!(active: false) if user.active
else
user.destroy!
self.update!(user: nil)
end
end
end

def tokenizer_class
tokenizer.constantize
end
@@ -20,4 +65,7 @@ def tokenizer_class
# updated_at :datetime not null
# url :string
# api_key :string
# bot_username :string
# user_id :integer
# enabled_chat_bot :boolean default(FALSE), not null
#
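Taken together, toggle_companion_user seeds an admin companion user (ids at or below FIRST_BOT_USER_ID) when both SiteSetting.ai_bot_enabled and enabled_chat_bot are on, reactivates a previously disabled one, and deactivates or destroys it when the bot is turned off. A rough usage sketch, e.g. from a Rails console — the record name and username below are made up:

# Illustrative only; assumes SiteSetting.ai_bot_enabled is already on.
llm = LlmModel.find_by(name: "gpt-4o") # hypothetical record
llm.update!(bot_username: "gpt4o_bot", enabled_chat_bot: true)
llm.toggle_companion_user   # creates or reactivates the companion user
llm.reload.user.id          # => -1200 or lower, per the FIRST_BOT_USER_ID cap

llm.update!(enabled_chat_bot: false)
llm.toggle_companion_user   # deactivates the user, or destroys it if it has no posts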
14 changes: 5 additions & 9 deletions app/models/shared_ai_conversation.rb
@@ -133,12 +133,10 @@ def formatted_excerpt
end

def self.build_conversation_data(topic, max_posts: DEFAULT_MAX_POSTS, include_usernames: false)
llm_name = nil
topic.topic_allowed_users.each do |tu|
if DiscourseAi::AiBot::EntryPoint::BOT_USER_IDS.include?(tu.user_id)
llm_name = DiscourseAi::AiBot::EntryPoint.find_bot_by_id(tu.user_id)&.llm
end
end
allowed_user_ids = topic.topic_allowed_users.pluck(:user_id)
ai_bot_participant = DiscourseAi::AiBot::EntryPoint.find_participant_in(allowed_user_ids)

llm_name = ai_bot_participant&.llm

llm_name = ActiveSupport::Inflector.humanize(llm_name) if llm_name
llm_name ||= I18n.t("discourse_ai.unknown_model")
@@ -170,9 +168,7 @@ def self.build_conversation_data(topic, max_posts: DEFAULT_MAX_POSTS, include_us
cooked: post.cooked,
}

mapped[:persona] = persona if ::DiscourseAi::AiBot::EntryPoint::BOT_USER_IDS.include?(
post.user_id,
)
mapped[:persona] = persona if ai_bot_participant&.id == post.user_id
mapped[:username] = post.user&.username if include_usernames
mapped
end,
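EntryPoint.find_participant_in is another helper introduced by this rewiring but not included in the excerpt. Judging from how it is used above (.llm for the model name, .id compared against post.user_id), it likely returns a small value object tied to the bot's companion user. A sketch under those assumptions:

# Hypothetical sketch — not code from this PR.
BotParticipant = Struct.new(:id, :llm) # id: companion user's id, llm: model name

def self.find_participant_in(participant_user_ids)
  model = LlmModel.where(enabled_chat_bot: true).where(user_id: participant_user_ids).first
  BotParticipant.new(model.user_id, model.name) if model
end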
4 changes: 3 additions & 1 deletion assets/javascripts/discourse/admin/models/ai-llm.js
@@ -11,7 +11,9 @@ export default class AiLlm extends RestModel {
"tokenizer",
"max_prompt_tokens",
"url",
"api_key"
"api_key",
"bot_username",
"enabled_chat_bot"
);
}

10 changes: 6 additions & 4 deletions assets/javascripts/discourse/components/ai-bot-header-icon.gjs
@@ -7,13 +7,15 @@ import i18n from "discourse-common/helpers/i18n";
import { composeAiBotMessage } from "../lib/ai-bot-helper";

export default class AiBotHeaderIcon extends Component {
@service siteSettings;
@service currentUser;
@service composer;

get bots() {
return this.siteSettings.ai_bot_add_to_header
? this.siteSettings.ai_bot_enabled_chat_bots.split("|").filter(Boolean)
: [];
const availableBots = this.currentUser.ai_enabled_chat_bots
.filter((bot) => !bot.is_persosna)
.filter(Boolean);

return availableBots ? availableBots.map((bot) => bot.model_name) : [];
}

@action
38 changes: 37 additions & 1 deletion assets/javascripts/discourse/components/ai-llm-editor.gjs
@@ -1,11 +1,13 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { Input } from "@ember/component";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import { later } from "@ember/runloop";
import { inject as service } from "@ember/service";
import BackButton from "discourse/components/back-button";
import DButton from "discourse/components/d-button";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import { popupAjaxError } from "discourse/lib/ajax-error";
import icon from "discourse-common/helpers/d-icon";
import i18n from "discourse-common/helpers/i18n";
@@ -110,6 +112,21 @@ export default class AiLlmEditor extends Component {
});
}

@action
async toggleEnabledChatBot() {
this.args.model.set("enabled_chat_bot", !this.args.model.enabled_chat_bot);
if (!this.args.model.isNew) {
try {
await this.args.model.update({
enabled_chat_bot: this.args.model.enabled_chat_bot,
});
} catch (e) {
popupAjaxError(e);
}
}
await this.toggleField("enabled_chat_bot", true);
}

<template>
<BackButton
@route="adminPlugins.show.discourse-ai-llms"
@@ -181,7 +198,26 @@ export default class AiLlmEditor extends Component {
@content={{I18n.t "discourse_ai.llms.hints.max_prompt_tokens"}}
/>
</div>

<div class="control-group">
<label>{{i18n "discourse_ai.llms.companion_bot_username"}}</label>
<Input
@type="text"
class="ai-llm-editor-input ai-llm-editor__companion-bot-user"
@value={{@model.bot_username}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.llms.hints.companion_bot_username"}}
/>
</div>
<div class="control-group">
<DToggleSwitch
class="ai-llm-editor__enabled-chat-bot"
@state={{@model.enabled_chat_bot}}
@label="discourse_ai.llms.enabled_chat_bot"
{{on "click" this.toggleEnabledChatBot}}
/>
</div>
<div class="control-group ai-llm-editor__action_panel">
<DButton
class="ai-llm-editor__test"
@@ -4,7 +4,6 @@ import { hash } from "@ember/helper";
import { next } from "@ember/runloop";
import { inject as service } from "@ember/service";
import KeyValueStore from "discourse/lib/key-value-store";
import I18n from "I18n";
import DropdownSelectBox from "select-kit/components/dropdown-select-box";

function isBotMessage(composer, currentUser) {
@@ -110,15 +109,16 @@ export default class BotSelector extends Component {
}

get llmOptions() {
return this.siteSettings.ai_bot_enabled_chat_bots
.split("|")
.filter(Boolean)
.map((bot) => {
return {
id: bot,
name: I18n.t(`discourse_ai.ai_bot.bot_names.${bot}`),
};
});
const availableBots = this.currentUser.ai_enabled_chat_bots
.filter((bot) => !bot.is_persosna)
.filter(Boolean);

return availableBots.map((bot) => {
return {
id: bot.model_name,
name: bot.display_name,
};
});
}

<template>
3 changes: 3 additions & 0 deletions config/locales/client.en.yml
@@ -210,6 +210,8 @@ en:
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
companion_bot_username: "Companion user's username"
enabled_chat_bot: "Allow Companion user to act as an AI Bot"
save: "Save"
edit: "Edit"
saved: "LLM Model Saved"
@@ -226,6 +228,7 @@ en:
hints:
max_prompt_tokens: "Max number of tokens for the prompt. As a rule of thumb, this should be 50% of the model's context window."
name: "We include this in the API call to specify which model we'll use."
companion_bot_username: "Some features, like the AI Bot, set up a companion user account other users can interact with. Set this companion user's username here."

providers:
aws_bedrock: "AWS Bedrock"
18 changes: 3 additions & 15 deletions config/settings.yml
@@ -342,30 +342,18 @@ discourse_ai:
type: group_list
list_type: compact
default: "3|14" # 3: @staff, 14: @trust_level_4
# Adding a new bot? Make sure to create a user for it on the seed file and update translations.
ai_bot_public_sharing_allowed_groups:
client: false
type: group_list
list_type: compact
default: "1|2" # 1: admins, 2: moderators
allow_any: false
refresh: true
ai_bot_enabled_chat_bots:
ai_bot_enabled_chat_bots: # TODO(roman): Remove setting. Deprecated
type: list
default: "gpt-3.5-turbo"
client: true
choices:
- gpt-3.5-turbo
- gpt-4
- gpt-4-turbo
- gpt-4o
- claude-2
- gemini-1.5-pro
- mixtral-8x7B-Instruct-V0.1
- claude-3-opus
- claude-3-sonnet
- claude-3-haiku
- cohere-command-r-plus
hidden: true
choices: "DiscourseAi::Configuration::LlmEnumerator.available_ai_bots"
ai_bot_add_to_header:
default: true
client: true
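The dynamic choices above point at DiscourseAi::Configuration::LlmEnumerator.available_ai_bots, which is not part of this excerpt. A sketch of what it plausibly returns for the now-deprecated setting, assuming it simply lists the names of bot-enabled LlmModel records — the implementation is an assumption:

# Hypothetical sketch of the enumerator referenced by `choices` — not code from this PR.
module DiscourseAi
  module Configuration
    class LlmEnumerator
      def self.available_ai_bots
        LlmModel.where(enabled_chat_bot: true).pluck(:name)
      end
    end
  end
end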
9 changes: 9 additions & 0 deletions db/migrate/20240528132059_add_companion_user_to_llm_model.rb
@@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddCompanionUserToLlmModel < ActiveRecord::Migration[7.0]
def change
add_column :llm_models, :bot_username, :string
add_column :llm_models, :user_id, :integer
add_column :llm_models, :enabled_chat_bot, :boolean, null: false, default: false
end
end