From 860d7e378c9600dd5440f3a543c19c38b199e004 Mon Sep 17 00:00:00 2001
From: smian1
Date: Sat, 24 May 2025 16:23:12 -0700
Subject: [PATCH] Refactor conversation processing to utilize
 llm_medium_experiment (4.1) for Developer Tools > Test a Conversation Prompt

---
 backend/utils/llm/conversation_processing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/utils/llm/conversation_processing.py b/backend/utils/llm/conversation_processing.py
index 4da6b5ecbed..83a662caecb 100644
--- a/backend/utils/llm/conversation_processing.py
+++ b/backend/utils/llm/conversation_processing.py
@@ -257,5 +257,5 @@ def generate_summary_with_prompt(conversation_text: str, prompt: str) -> str:
 
     You must output only the summary, no other text. Make sure to be concise and clear.
     """
-    response = llm_mini.invoke(prompt)
+    response = llm_medium_experiment.invoke(prompt)
     return response.content
\ No newline at end of file