From ba57005e87319493b27c42b3cd2b1e6dd38de712 Mon Sep 17 00:00:00 2001
From: Valerie <41258413+ValerieRossDEV@users.noreply.github.com>
Date: Sat, 20 May 2023 14:17:31 +0200
Subject: [PATCH] support prompts in other languages

---
 LANGUAGE_PROMPT.py | 18 ++++++++++++++++++
 question.py        |  3 +++
 2 files changed, 21 insertions(+)
 create mode 100644 LANGUAGE_PROMPT.py

diff --git a/LANGUAGE_PROMPT.py b/LANGUAGE_PROMPT.py
new file mode 100644
index 00000000000..99452103be6
--- /dev/null
+++ b/LANGUAGE_PROMPT.py
@@ -0,0 +1,18 @@
+from langchain.prompts.prompt import PromptTemplate
+
+_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question in the language of the question.
+
+Chat History:
+{chat_history}
+Follow Up Input: {question}
+Standalone question:"""
+CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
+
+prompt_template = """Use the following pieces of context to answer the question at the end in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+{context}
+
+Question: {question}
+Helpful Answer:"""
+QA_PROMPT = PromptTemplate(
+    template=prompt_template, input_variables=["context", "question"])
\ No newline at end of file
diff --git a/question.py b/question.py
index 5f3d7f026c9..8df9e36cdb2 100644
--- a/question.py
+++ b/question.py
@@ -7,6 +7,9 @@
 from langchain.chat_models import ChatAnthropic
 from langchain.vectorstores import SupabaseVectorStore
 from stats import add_usage
+import LANGUAGE_PROMPT
+
+ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT
 
 memory = ConversationBufferMemory(
     memory_key="chat_history", return_messages=True)
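
A minimal usage sketch, separate from the diff above: one way the two prompts defined in LANGUAGE_PROMPT.py could be passed into the chain explicitly, assuming langchain's ConversationalRetrievalChain.from_llm accepts the condense_question_prompt and combine_docs_chain_kwargs parameters (as it did in the 0.0.x releases current at the time of this patch). The llm, vector_store, and memory names are hypothetical stand-ins for the objects question.py already builds, not part of this change.

from langchain.chains import ConversationalRetrievalChain
import LANGUAGE_PROMPT

# Sketch: wire the language-aware prompts into the chain directly.
# llm, vector_store and memory are placeholders standing in for the
# ChatAnthropic model, SupabaseVectorStore and ConversationBufferMemory
# that question.py sets up elsewhere.
chain = ConversationalRetrievalChain.from_llm(
    llm,
    retriever=vector_store.as_retriever(),
    condense_question_prompt=LANGUAGE_PROMPT.CONDENSE_QUESTION_PROMPT,
    combine_docs_chain_kwargs={"prompt": LANGUAGE_PROMPT.QA_PROMPT},
    memory=memory,
)

# With memory attached, only the new question is needed; the chain pulls
# chat_history from memory and answers in the language of the question.
answer = chain({"question": "¿Qué contiene este documento?"})["answer"]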