Skip to content

Commit

Permalink
Merge pull request #106 from ValerieRossDEV/main
Browse files Browse the repository at this point in the history
support other prompt languages in new backend
  • Loading branch information
StanGirard committed May 21, 2023
2 parents b76c63b + 8f8c30f commit c2b7c4e
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 1 deletion.
19 changes: 19 additions & 0 deletions backend/LANGUAGE_PROMPT.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from langchain.prompts.prompt import PromptTemplate

# Prompt used to condense the chat history plus a follow-up question into one
# standalone question, instructing the model to respond in the language the
# user asked in (rather than defaulting to English).
_template = """Given the following conversation and a follow up question, answer the follow up question in the initial language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

# Prompt for the final question-answering step over the retrieved context;
# again forces the answer into the language of the question itself.
prompt_template = """Use the following pieces of context to answer the question in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{context}
Question: {question}
Helpful Answer:"""
# from_template infers the input variables ({context}, {question}) directly
# from the template string — equivalent to passing input_variables explicitly,
# and consistent with how CONDENSE_QUESTION_PROMPT is built above.
QA_PROMPT = PromptTemplate.from_template(prompt_template)
6 changes: 5 additions & 1 deletion backend/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from fastapi.openapi.utils import get_openapi
from tempfile import SpooledTemporaryFile
import shutil
import LANGUAGE_PROMPT
import pypandoc

from parsers.common import file_already_exists
Expand Down Expand Up @@ -73,7 +74,6 @@ class ChatMessage(BaseModel):




file_processors = {
".txt": process_txt,
".csv": process_csv,
Expand Down Expand Up @@ -115,6 +115,10 @@ async def upload_file(file: UploadFile):
async def chat_endpoint(chat_message: ChatMessage):
history = chat_message.history
# Logic from your Streamlit app goes here. For example:

#this overwrites the built-in prompt of the ConversationalRetrievalChain
ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT

qa = None
if chat_message.model.startswith("gpt"):
qa = ConversationalRetrievalChain.from_llm(
Expand Down

0 comments on commit c2b7c4e

Please sign in to comment.