Skip to content

Commit

Permalink
Merge pull request #89 from merefield/force_search
Browse files Browse the repository at this point in the history
FEATURE: add setting to force bot to always perform a forum search
  • Loading branch information
merefield committed Apr 29, 2024
2 parents 36886b4 + 93105a7 commit 84e5bfc
Show file tree
Hide file tree
Showing 6 changed files with 32 additions and 34 deletions.
1 change: 1 addition & 0 deletions config/locales/server.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ en:
chatbot_forum_search_function_results_content_type: "The scope of content to be returned in the search results. Choose 'posts' for just ranking Posts, 'topics' for the entire Topics that contain those ranked Posts"
chatbot_forum_search_function_results_topic_max_posts_count_strategy: "The strategy used to determine the maximum number of Posts to be returned in the search results if content_type is 'topics'. Choose 'all' for all Posts, 'just_enough' to limit the Posts to only those up to and including the ranked Post, 'stretch_if_required' to include all Posts up to the ranked Post regardless of the max setting, 'exact' for exactly the number of Posts specified in the max setting"
chatbot_forum_search_function_results_topic_max_posts_count: "The maximum number of Posts to be returned in the search results if content_type is 'topics'"
chatbot_forum_search_function_force: "(EXPERIMENTAL): Force the bot to search the forum for information at the start of every response cycle. This will likely increase token usage significantly, but may improve the bot's knowledge of forum content."
chatbot_forum_search_function_hybrid_search: "(EXPERIMENTAL): Enable hybrid search mode. This will cause the bot to search using native keyword search in addition to embedding based semantic search and it will attempt to blend the results"
chatbot_locations_plugin_support: "(EXPERIMENTAL currently user locations only) Natural language querying capability for <a target='_blank' rel='noopener' href='https://github.com/paviliondev/discourse-locations'>Locations Plugin</a> (when using RAG mode, requires Locations Plugin to be installed)"
chatbot_escalate_to_staff_function: "(EXPERIMENTAL, Chat only) if user requests human assistance or gets irritated, escalate to staff via PM (requires staff group to be populated)"
Expand Down
3 changes: 3 additions & 0 deletions config/settings.yml
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,9 @@ plugins:
default: 3
min: 1
max: 20
chatbot_forum_search_function_force:
client: false
default: false
chatbot_forum_search_function_hybrid_search:
client: false
default: false
Expand Down
1 change: 1 addition & 0 deletions lib/discourse_chatbot/bots/open_ai_bot_base.rb
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ def initialize(opts)
config.api_type = :azure
config.api_version = SiteSetting.chatbot_open_ai_model_custom_api_version
end
config.log_errors = true if SiteSetting.chatbot_enable_verbose_rails_logging
end

@client = OpenAI::Client.new do |f|
Expand Down
53 changes: 23 additions & 30 deletions lib/discourse_chatbot/bots/open_ai_bot_rag.rb
Original file line number Diff line number Diff line change
Expand Up @@ -98,39 +98,31 @@ def create_func_mapping(functions)
functions.each_with_object({}) { |func, mapping| mapping[func.name] = func }
end

def create_chat_completion(messages, use_functions = true)
def create_chat_completion(messages, use_functions = true, force_search = false)
::DiscourseChatbot.progress_debug_message <<~EOS
I called the LLM to help me
------------------------------
value of messages is: #{messages}
+++++++++++++++++++++++++++++++
EOS
if use_functions && @tools
res = @client.chat(
parameters: {
model: @model_name,
messages: messages,
tools: @tools,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
}
)
else
res = @client.chat(
parameters: {
model: @model_name,
messages: messages,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
}
)
end
parameters = {
model: @model_name,
messages: messages,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
}

parameters.merge!(tools: @tools) if use_functions && @tools

parameters.merge!(tool_choice: {"type": "function", "function": {"name": "local_forum_search"}}) if use_functions && @tools && force_search

res = @client.chat(
parameters: parameters
)

::DiscourseChatbot.progress_debug_message <<~EOS
+++++++++++++++++++++++++++++++++++++++
The llm responded with
Expand All @@ -152,7 +144,7 @@ def generate_response(opts)
# Iteration: #{iteration}
-------------------------------
EOS
res = create_chat_completion(@chat_history + @inner_thoughts)
res = create_chat_completion(@chat_history + @inner_thoughts, true, iteration == 1 && SiteSetting.chatbot_forum_search_function_force)

if res.dig("error")
error_text = "ERROR when trying to perform chat completion: #{res.dig("error", "message")}"
Expand All @@ -162,9 +154,10 @@ def generate_response(opts)

finish_reason = res["choices"][0]["finish_reason"]

if ['stop','length'].include?(finish_reason) || @inner_thoughts.length > 7
if (['stop','length'].include?(finish_reason) || @inner_thoughts.length > 7) &&
!(iteration == 1 && SiteSetting.chatbot_forum_search_function_force)
return res
elsif finish_reason == 'tool_calls'
elsif finish_reason == 'tool_calls' || (iteration == 1 && SiteSetting.chatbot_forum_search_function_force)
handle_function_call(res, opts)
else
raise "Unexpected finish reason: #{finish_reason}"
Expand Down
4 changes: 2 additions & 2 deletions plugin.rb
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
# frozen_string_literal: true
# name: discourse-chatbot
# about: a plugin that allows you to have a conversation with a configurable chatbot in Discourse Chat, Topics and Private Messages
# version: 0.9.16
# version: 0.9.17
# authors: merefield
# url: https://github.com/merefield/discourse-chatbot

gem 'multipart-post', '2.4.0', { require: false }
gem 'faraday-multipart', '1.0.4', { require: false }
gem 'event_stream_parser', '1.0.0', { require: false }
gem "ruby-openai", '6.5.0', { require: false }
gem "ruby-openai", '7.0.0', { require: false }
# google search
gem "google_search_results", '2.2.0'
# wikipedia
Expand Down
4 changes: 2 additions & 2 deletions spec/lib/bot/open_ai_agent_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
first_query = get_chatbot_input_fixture("llm_first_query").unshift(system_entry)
second_query = get_chatbot_input_fixture("llm_second_query").unshift(system_entry)

described_class.any_instance.expects(:create_chat_completion).with(first_query).returns(llm_function_response)
described_class.any_instance.expects(:create_chat_completion).with(second_query).returns(llm_final_response)
described_class.any_instance.expects(:create_chat_completion).with(first_query, true, false).returns(llm_function_response)
described_class.any_instance.expects(:create_chat_completion).with(second_query, true, false).returns(llm_final_response)

expect(rag.get_response(query, opts)[:reply]).to eq(llm_final_response["choices"][0]["message"]["content"])
end
Expand Down

0 comments on commit 84e5bfc

Please sign in to comment.