fix: 🐛 related #2090

Merged (1 commit) on Jan 26, 2024
backend/llm/api_brain_qa.py (1 addition, 1 deletion)

@@ -331,7 +331,7 @@ async def generate_stream(
             chat_service.update_message_by_id(
                 message_id=str(streamed_chat_history.message_id),
                 user_message=question.question,
-                assistant="".join(response_tokens),
+                assistant="".join(str(token) for token in response_tokens),
                 metadata=self.metadata,
             )

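Note on the change above: str.join requires every element to already be a str, so a single non-string token in response_tokens (a None, a number, or a chunk object) makes the whole join raise TypeError and the message update fail. A minimal sketch of the failure mode and the fix; the token values are made up for illustration, not taken from the PR:

# Hypothetical token list; a None has slipped in among the text chunks.
response_tokens = ["Hel", "lo", None]

try:
    "".join(response_tokens)  # old code: every item must already be a str
except TypeError as err:
    print("old join fails:", err)  # sequence item 2: expected str instance, NoneType found

assistant = "".join(str(token) for token in response_tokens)  # new code: coerce first
print(assistant)  # HelloNone
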
backend/modules/brain/dto/inputs.py (1 addition, 1 deletion)

@@ -30,7 +30,7 @@ class CreateBrainProperties(BaseModel, extra=Extra.forbid):
     status: Optional[str] = "private"
     model: Optional[str]
     temperature: Optional[float] = 0.0
-    max_tokens: Optional[int] = 256
+    max_tokens: Optional[int] = 2000
     prompt_id: Optional[UUID] = None
     brain_type: Optional[BrainType] = BrainType.DOC
     brain_definition: Optional[CreateApiBrainDefinition]
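The inputs.py change is purely a default: a brain created without an explicit max_tokens now gets 2000 instead of 256. A small sketch with a trimmed stand-in model (the real CreateBrainProperties has more fields and stricter config than shown here):

from typing import Optional

from pydantic import BaseModel


class BrainProps(BaseModel):
    temperature: Optional[float] = 0.0
    max_tokens: Optional[int] = 2000  # was 256 before this PR


print(BrainProps().max_tokens)  # 2000, the new default
print(BrainProps(max_tokens=512).max_tokens)  # 512, explicit values still win
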
backend/modules/chat/controller/chat/brainful_chat.py (39 additions, 34 deletions)

@@ -53,42 +53,47 @@ def get_answer_generator(
         user_id,
         chat_question,
     ):
-        brain_id_to_use = brain_id
         metadata = {}
-        if not brain_id:
-            brain_settings = BrainSettings()
-            supabase_client = get_supabase_client()
-            embeddings = None
-            if brain_settings.ollama_api_base_url:
-                embeddings = OllamaEmbeddings(
-                    base_url=brain_settings.ollama_api_base_url
-                )  # pyright: ignore reportPrivateUsage=none
-            else:
-                embeddings = OpenAIEmbeddings()
-            vector_store = CustomSupabaseVectorStore(
-                supabase_client, embeddings, table_name="vectors", user_id=user_id
-            )
-            # Get the first question from the chat_question
-
-            question = chat_question.question
-            history = chat_service.get_chat_history(chat_id)
-            if history:
-                question = history[0].user_message
-                brain_id_to_use = history[0].brain_id
-
-            list_brains = []
-            if history:
-                list_brains = vector_store.find_brain_closest_query(user_id, question)
-                metadata["close_brains"] = list_brains
-            else:
-                list_brains = vector_store.find_brain_closest_query(user_id, question)
-            if list_brains:
-                brain_id_to_use = list_brains[0]["id"]
-            else:
-                brain_id_to_use = None
-            # Add to metadata close_brains and close_brains_similarity
-            metadata["close_brains"] = list_brains
+        brain_settings = BrainSettings()
+        supabase_client = get_supabase_client()
+        embeddings = None
+        if brain_settings.ollama_api_base_url:
+            embeddings = OllamaEmbeddings(
+                base_url=brain_settings.ollama_api_base_url
+            )  # pyright: ignore reportPrivateUsage=none
+        else:
+            embeddings = OpenAIEmbeddings()
+        vector_store = CustomSupabaseVectorStore(
+            supabase_client, embeddings, table_name="vectors", user_id=user_id
+        )
+
+        # Init
+
+        brain_id_to_use = brain_id
+
+        # Get the first question from the chat_question
+
+        question = chat_question.question
+        history = chat_service.get_chat_history(chat_id)
+
+        list_brains = []  # To return
+
+        if history and not brain_id_to_use:
+            # Replace the question with the first question from the history
+            question = history[0].user_message
+
+        if history and not brain_id:
+            brain_id_to_use = history[0].brain_id
+
+        # Calculate the closest brains to the question
+        list_brains = vector_store.find_brain_closest_query(user_id, question)
+
+        metadata["close_brains"] = list_brains
+
+        if list_brains and not brain_id_to_use:
+            brain_id_to_use = list_brains[0]["id"]
+
         # GENERIC
         follow_up_questions = chat_service.get_follow_up_question(chat_id)
         metadata["follow_up_questions"] = follow_up_questions
         metadata["model"] = model
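The refactor above mostly reorders the method: the vector store is now built unconditionally, close_brains is always attached to the metadata, and brain_id_to_use is resolved with a clear precedence of explicit brain_id, then the brain recorded in the chat history, then the closest brain by vector similarity. A standalone sketch of that precedence; the function name, the tuple-shaped history, and the find_closest_brains callable are stand-ins, not the project's real types:

from typing import Callable, List, Optional, Tuple
from uuid import UUID, uuid4


def pick_brain(
    explicit_brain_id: Optional[UUID],
    history: List[Tuple[str, Optional[UUID]]],  # (user_message, brain_id) pairs
    find_closest_brains: Callable[[str], List[dict]],
    question: str,
):
    brain_id_to_use = explicit_brain_id

    # Prefer the conversation's first question when there is history.
    if history and not brain_id_to_use:
        question = history[0][0]
    if history and not explicit_brain_id:
        brain_id_to_use = history[0][1]

    # Closest brains are always computed so they can be returned as metadata.
    close_brains = find_closest_brains(question)

    # Fall back to the closest brain only if nothing else decided it.
    if close_brains and not brain_id_to_use:
        brain_id_to_use = close_brains[0]["id"]

    return brain_id_to_use, close_brains


# Example: no explicit brain and no history, so the similarity search decides.
brain_id, close = pick_brain(None, [], lambda q: [{"id": uuid4(), "similarity": 0.9}], "hello")
print(brain_id, close)
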
backend/modules/chat/service/chat_service.py (1 addition, 1 deletion)

@@ -99,7 +99,7 @@ def get_chat_history(self, chat_id: str) -> List[GetChatHistoryOutput]:
                 assistant=message.assistant,
                 message_time=message.message_time,
                 brain_name=brain.name if brain else None,
-                brain_id=brain.id if brain else None,
+                brain_id=str(brain.id) if brain else None,
                 prompt_title=prompt.title if prompt else None,
                 metadata=message.metadata,
             )
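The chat_service.py change casts brain.id to str before it goes into GetChatHistoryOutput. A raw UUID is a common source of serialization errors (the stdlib json module refuses it, and string-typed response fields reject it); the diff does not show which of those bit here, so the sketch below only illustrates the general pitfall the cast avoids:

import json
from uuid import uuid4

brain_id = uuid4()

try:
    json.dumps({"brain_id": brain_id})  # raw UUID: stdlib json cannot encode it
except TypeError as err:
    print("raw UUID fails:", err)

print(json.dumps({"brain_id": str(brain_id)}))  # the string form serializes fine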