3 changes: 3 additions & 0 deletions config/locales/client.en.yml
@@ -180,6 +180,9 @@ en:
         temperature:
           label: "Temperature"
           description: "Temperature to use for the LLM. Increase to increase randomness (leave empty to use model default)"
+        max_output_tokens:
+          label: "Max output tokens"
+          description: "When specified, sets an upper bound on the maximum number of tokens the model can generate. Respects the LLM's max output tokens limit"

   discourse_ai:
     title: "AI"
5 changes: 5 additions & 0 deletions discourse_automation/llm_triage.rb
@@ -24,6 +24,7 @@
   field :max_post_tokens, component: :text
   field :stop_sequences, component: :text_list, required: false
   field :temperature, component: :text
+  field :max_output_tokens, component: :text

   # Actions
   field :category, component: :category
@@ -85,6 +86,9 @@
     temperature = temperature.to_f
   end

+  max_output_tokens = fields.dig("max_output_tokens", "value").to_i
+  max_output_tokens = nil if max_output_tokens <= 0
+
   max_post_tokens = nil if max_post_tokens <= 0

   stop_sequences = fields.dig("stop_sequences", "value")
@@ -122,6 +126,7 @@
       stop_sequences: stop_sequences,
       automation: self.automation,
       temperature: temperature,
+      max_output_tokens: max_output_tokens,
       action: context["action"],
     )
   rescue => e
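The two parsing lines above lean on Ruby's integer coercion: a blank or missing automation field coerces to 0, and the `nil if <= 0` guard then maps it to nil so the model's own default limit applies downstream. A minimal sketch of that behavior, where parse_max_output_tokens is a hypothetical helper name, not part of the plugin:

# A minimal sketch, assuming plain Ruby semantics only.
def parse_max_output_tokens(raw)
  value = raw.to_i         # nil -> 0, "" -> 0, "700" -> 700, "abc" -> 0
  value <= 0 ? nil : value # non-positive means "no cap": fall back to the model default
end

parse_max_output_tokens(nil)   # => nil
parse_max_output_tokens("700") # => 700
parse_max_output_tokens("-5")  # => nil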
3 changes: 2 additions & 1 deletion lib/automation/llm_triage.rb
@@ -21,6 +21,7 @@ def self.handle(
     temperature: nil,
     whisper: nil,
     reply_persona_id: nil,
+    max_output_tokens: nil,
     action: nil
   )
   if category_id.blank? && tags.blank? && canned_reply.blank? && hide_topic.blank? &&
@@ -59,8 +60,8 @@ def self.handle(
     result =
       llm.generate(
         prompt,
+        max_tokens: max_output_tokens,
         temperature: temperature,
-        max_tokens: 700, # ~500 words
         user: Discourse.system_user,
         stop_sequences: stop_sequences,
         feature_name: "llm_triage",
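This hunk replaces the hard-coded cap (max_tokens: 700, roughly 500 words) with the pass-through value, so a blank automation field now means "use the model default" rather than a fixed limit. A toy stand-in illustrating the nil pass-through; this generate is not the plugin's real LLM client:

# Toy stand-in: nil falls through to the model's own output limit.
def generate(prompt, max_tokens: nil, **opts)
  cap = max_tokens || :model_default
  { prompt: prompt, cap: cap }
end

generate("triage this")                  # => { prompt: "triage this", cap: :model_default }
generate("triage this", max_tokens: 700) # => { prompt: "triage this", cap: 700 }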
2 changes: 2 additions & 0 deletions spec/lib/discourse_automation/llm_triage_spec.rb
@@ -95,6 +95,7 @@ def add_automation_field(name, value, type: "text")
     reply_user.update!(admin: true)
     add_automation_field("include_personal_messages", true, type: :boolean)
     add_automation_field("temperature", "0.2")
+    add_automation_field("max_output_tokens", "700")
     post = Fabricate(:post, topic: personal_message)

     prompt_options = nil
@@ -107,6 +108,7 @@ def add_automation_field(name, value, type: "text")
     end

     expect(prompt_options[:temperature]).to eq(0.2)
+    expect(prompt_options[:max_tokens]).to eq(700)

     last_post = post.topic.reload.posts.order(:post_number).last
     expect(last_post.raw).to eq(canned_reply_text)
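The spec exercises the full round trip: the automation field "max_output_tokens" arrives as the string "700" and surfaces as the integer prompt_options[:max_tokens] passed to the LLM. A condensed sketch of that mapping, reusing the coercion from discourse_automation/llm_triage.rb (the fields hash shape is taken from the dig call in that diff):

fields = { "max_output_tokens" => { "value" => "700" } }

max_output_tokens = fields.dig("max_output_tokens", "value").to_i
max_output_tokens = nil if max_output_tokens <= 0

max_output_tokens # => 700, later handed to llm.generate as max_tokens: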