Skip to content

Commit

Permalink
修复:切换embed_model时,FAISS向量库未正确释放,导致`d == self.d assert error`;
Browse files Browse the repository at this point in the history
ApiRequest中chat接口增加max_tokens参数;FileDocModel模型字段错误(chatchat-space#1691)
  • Loading branch information
liunux4odoo committed Oct 16, 2023
1 parent cd74812 commit fe7a675
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 12 deletions.
2 changes: 1 addition & 1 deletion server/db/models/knowledge_file_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ class FileDocModel(Base):
meta_data = Column(JSON, default={})

def __repr__(self):
return f"<FileDoc(id='{self.id}', kb_name='{self.kb_name}', file_name='{self.file_name}', doc_id='{self.doc_id}', metadata='{self.metadata}')>"
return f"<FileDoc(id='{self.id}', kb_name='{self.kb_name}', file_name='{self.file_name}', doc_id='{self.doc_id}', metadata='{self.meta_data}')>"
3 changes: 2 additions & 1 deletion server/knowledge_base/kb_doc_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,8 +337,9 @@ def output():
if not kb.exists() and not allow_empty_kb:
yield {"code": 404, "msg": f"未找到知识库 ‘{knowledge_base_name}’"}
else:
if kb.exists():
kb.clear_vs()
kb.create_kb()
kb.clear_vs()
files = list_files_from_folder(knowledge_base_name)
kb_files = [(file, knowledge_base_name) for file in files]
i = 0
Expand Down
2 changes: 1 addition & 1 deletion server/knowledge_base/kb_service/faiss_kb_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def do_delete_doc(self,

def do_clear_vs(self):
with kb_faiss_pool.atomic:
kb_faiss_pool.pop(self.kb_name)
kb_faiss_pool.pop((self.kb_name, self.vector_name))
shutil.rmtree(self.vs_path)
os.makedirs(self.vs_path)

Expand Down
1 change: 1 addition & 0 deletions server/knowledge_base/migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def files2vs(kb_name: str, kb_files: List[KnowledgeFile]):
# 清除向量库,从本地文件重建
if mode == "recreate_vs":
kb.clear_vs()
kb.create_kb()
kb_files = file_to_kbfile(kb_name, list_files_from_folder(kb_name))
files2vs(kb_name, kb_files)
kb.save_vector_store()
Expand Down
26 changes: 17 additions & 9 deletions webui_pages/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def chat_fastchat(
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
max_tokens: int = 1024, # todo:根据message内容自动计算max_tokens
max_tokens: int = 1024, # TODO:根据message内容自动计算max_tokens
no_remote_api: bool = None,
**kwargs: Any,
):
Expand Down Expand Up @@ -316,6 +316,7 @@ def chat_chat(
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
max_tokens: int = 1024,
prompt_name: str = "llm_chat",
no_remote_api: bool = None,
):
Expand All @@ -331,6 +332,7 @@ def chat_chat(
"stream": stream,
"model_name": model,
"temperature": temperature,
"max_tokens": max_tokens,
"prompt_name": prompt_name,
}

Expand All @@ -346,13 +348,14 @@ def chat_chat(
return self._httpx_stream2generator(response)

def agent_chat(
self,
query: str,
history: List[Dict] = [],
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
no_remote_api: bool = None,
self,
query: str,
history: List[Dict] = [],
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
max_tokens: int = 1024,
no_remote_api: bool = None,
):
'''
对应api.py/chat/agent_chat 接口
Expand All @@ -366,6 +369,7 @@ def agent_chat(
"stream": stream,
"model_name": model,
"temperature": temperature,
"max_tokens": max_tokens,
}

print(f"received input message:")
Expand All @@ -389,6 +393,7 @@ def knowledge_base_chat(
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
max_tokens: int = 1024,
prompt_name: str = "knowledge_base_chat",
no_remote_api: bool = None,
):
Expand All @@ -407,6 +412,7 @@ def knowledge_base_chat(
"stream": stream,
"model_name": model,
"temperature": temperature,
"max_tokens": max_tokens,
"local_doc_url": no_remote_api,
"prompt_name": prompt_name,
}
Expand Down Expand Up @@ -435,6 +441,7 @@ def search_engine_chat(
stream: bool = True,
model: str = LLM_MODEL,
temperature: float = TEMPERATURE,
max_tokens: int = 1024,
prompt_name: str = "knowledge_base_chat",
no_remote_api: bool = None,
):
Expand All @@ -452,6 +459,7 @@ def search_engine_chat(
"stream": stream,
"model_name": model,
"temperature": temperature,
"max_tokens": max_tokens,
"prompt_name": prompt_name,
}

Expand All @@ -475,7 +483,7 @@ def search_engine_chat(
def _check_httpx_json_response(
self,
response: httpx.Response,
errorMsg: str = f"无法连接API服务器,请确认已执行python server\\api.py",
errorMsg: str = f"无法连接API服务器,请确认 ‘api.py’ 已正常启动。",
) -> Dict:
'''
check whether httpx returns correct data with normal Response.
Expand Down

0 comments on commit fe7a675

Please sign in to comment.