diff --git a/backend/LANGUAGE_PROMPT.py b/backend/LANGUAGE_PROMPT.py
new file mode 100644
index 00000000000..774fcff1909
--- /dev/null
+++ b/backend/LANGUAGE_PROMPT.py
@@ -0,0 +1,19 @@
+from langchain.prompts.prompt import PromptTemplate
+
+_template = """Given the following conversation and a follow up question, answer the follow up question in the initial language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+Chat History:
+{chat_history}
+Follow Up Input: {question}
+Standalone question:"""
+CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
+
+prompt_template = """Use the following pieces of context to answer the question in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+{context}
+
+Question: {question}
+Helpful Answer:"""
+QA_PROMPT = PromptTemplate(
+    template=prompt_template, input_variables=["context", "question"]
+)
\ No newline at end of file
diff --git a/backend/api.py b/backend/api.py
index 4435e814317..f4433f2d19f
--- a/backend/api.py
+++ b/backend/api.py
@@ -11,6 +11,7 @@
 from fastapi.openapi.utils import get_openapi
 from tempfile import SpooledTemporaryFile
 import shutil
+import LANGUAGE_PROMPT
 import pypandoc
 
 from parsers.common import file_already_exists
@@ -73,7 +74,6 @@ class ChatMessage(BaseModel):
 
 
 
-
 file_processors = {
     ".txt": process_txt,
     ".csv": process_csv,
@@ -115,6 +115,10 @@ async def upload_file(file: UploadFile):
 async def chat_endpoint(chat_message: ChatMessage):
     history = chat_message.history
     # Logic from your Streamlit app goes here. For example:
+
+    #this overwrites the built-in prompt of the ConversationalRetrievalChain
+    ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT
+
    qa = None
     if chat_message.model.startswith("gpt"):
         qa = ConversationalRetrievalChain.from_llm(
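
Review note: `ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT` binds the whole module to a class attribute that, as far as I can tell, `ConversationalRetrievalChain` never reads, so the custom prompts defined in LANGUAGE_PROMPT.py likely never reach the chain. In the classic `langchain` API, `from_llm` accepts these prompts directly via its `condense_question_prompt` parameter and `combine_docs_chain_kwargs`. Below is a minimal sketch of that wiring; `make_language_aware_chain`, `llm`, and `retriever` are hypothetical names for objects api.py builds elsewhere, not part of this diff.

```python
from langchain.chains import ConversationalRetrievalChain

import LANGUAGE_PROMPT


def make_language_aware_chain(llm, retriever):
    """Hypothetical helper: build the chain with the custom prompts applied.

    `llm` and `retriever` are assumed to exist elsewhere in api.py
    (e.g. an OpenAI chat model and a vector-store retriever).
    """
    return ConversationalRetrievalChain.from_llm(
        llm,
        retriever=retriever,
        # Rewrites chat history + follow-up into a standalone question,
        # preserving the question's original language:
        condense_question_prompt=LANGUAGE_PROMPT.CONDENSE_QUESTION_PROMPT,
        # Prompt used by the underlying combine-docs (QA) step:
        combine_docs_chain_kwargs={"prompt": LANGUAGE_PROMPT.QA_PROMPT},
    )
```

Passing the prompts through `from_llm` also keeps them instance-scoped rather than mutating shared class state, avoiding leakage between concurrent chat requests.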