-
Notifications
You must be signed in to change notification settings - Fork 5k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
增加了仅限GPT4的agent功能,陆续补充,中文版readme已写 (#1611)
- Loading branch information
1 parent
c546b42
commit 5702554
Showing
11 changed files
with
277 additions
and
87 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,109 @@ | ||
import asyncio
import json
from enum import IntEnum
from typing import Any, Dict, List, Optional
from uuid import UUID

from langchain.callbacks import AsyncIteratorCallbackHandler
from langchain.schema import AgentFinish, AgentAction
from langchain.schema.output import LLMResult
|
||
|
||
def dumps(obj: Dict) -> str:
    """Serialize *obj* to a JSON string without escaping non-ASCII characters."""
    serialized = json.dumps(obj, ensure_ascii=False)
    return serialized
|
||
|
||
class Status(IntEnum):
    """Lifecycle states attached to events streamed by the callback handler.

    Uses :class:`enum.IntEnum` so the members form a closed, named set while
    remaining backward compatible: each member compares equal to its plain
    ``int`` value and serializes as that int via :func:`json.dumps`.
    """

    start = 1          # an LLM call has started
    running = 2        # an LLM token is being streamed
    complete = 3       # an LLM call finished
    agent_action = 4   # a tool call has started
    agent_finish = 5   # a tool call produced its output
    error = 6          # an LLM or tool call raised
    make_tool = 7      # reserved for tool-construction events
|
||
|
||
class CustomAsyncIteratorCallbackHandler(AsyncIteratorCallbackHandler):
    """Streams agent/LLM callback events as JSON strings through an asyncio queue.

    Every callback updates ``self.cur_tool`` (a dict describing the current
    tool invocation and streaming state), serializes it with :func:`dumps`,
    and pushes the JSON string onto ``self.queue`` for a consumer to read.
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): the base class __init__ already creates queue/done;
        # these assignments replace those instances with fresh ones.
        self.queue = asyncio.Queue()
        self.done = asyncio.Event()
        # State of the tool call currently being streamed; reset on agent finish.
        self.cur_tool = {}
        # Whether LLM tokens are forwarded to the queue; cleared when the
        # literal token "Action" is seen (see on_llm_new_token).
        self.out = True

    async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, *, run_id: UUID,
                            parent_run_id: UUID | None = None, tags: List[str] | None = None,
                            metadata: Dict[str, Any] | None = None, **kwargs: Any) -> None:
        """Begin a new tool record and emit an ``agent_action`` event."""
        self.cur_tool = {
            "tool_name": serialized["name"],
            "input_str": input_str,
            "output_str": "",
            "status": Status.agent_action,
            "run_id": run_id.hex,
            "llm_token": "",
            "final_answer": "",
            "error": "",
        }
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_tool_end(self, output: str, *, run_id: UUID, parent_run_id: UUID | None = None,
                          tags: List[str] | None = None, **kwargs: Any) -> None:
        """Emit an ``agent_finish`` event carrying the tool output.

        Re-enables token forwarding and strips any "Answer:" markers from the
        tool output before emitting it.
        """
        self.out = True
        self.cur_tool.update(
            status=Status.agent_finish,
            output_str=output.replace("Answer:", ""),
        )
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_tool_error(self, error: Exception | KeyboardInterrupt, *, run_id: UUID,
                            parent_run_id: UUID | None = None, tags: List[str] | None = None, **kwargs: Any) -> None:
        """Emit an ``error`` event with the stringified tool exception."""
        self.out = True
        self.cur_tool.update(
            status=Status.error,
            error=str(error),
        )
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        """Forward streamed LLM tokens as ``running`` events.

        When the literal token "Action" arrives, forwarding is switched off so
        the model's tool-invocation text is not streamed to the consumer.
        NOTE(review): this presumably matches ReAct-style agent output where
        "Action" precedes a tool call — confirm against the agent's prompt.
        """
        if token:
            if token == "Action":
                self.out = False
                # Emit a paragraph break instead of the suppressed text.
                self.cur_tool.update(
                    status=Status.running,
                    llm_token="\n\n",
                )

            if self.out:
                self.cur_tool.update(
                    status=Status.running,
                    llm_token=token,
                )
                self.queue.put_nowait(dumps(self.cur_tool))

    async def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> None:
        """Emit a ``start`` event with an empty token field."""
        self.cur_tool.update(
            status=Status.start,
            llm_token="",
        )
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Emit a ``complete`` event and re-enable token forwarding.

        NOTE(review): unlike the base handler, this override never sets
        ``self.done`` — presumably because an agent run spans several LLM
        calls; verify that the consumer of ``aiter()`` still terminates.
        """
        self.out = True
        self.cur_tool.update(
            status=Status.complete,
            llm_token="",
        )
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_llm_error(self, error: Exception | KeyboardInterrupt, **kwargs: Any) -> None:
        """Emit an ``error`` event with the stringified LLM exception."""
        self.cur_tool.update(
            status=Status.error,
            error=str(error),
        )
        self.queue.put_nowait(dumps(self.cur_tool))

    async def on_agent_finish(
        self, finish: AgentFinish, *, run_id: UUID, parent_run_id: Optional[UUID] = None,
        tags: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Reset the per-tool state once the agent run completes (emits nothing)."""
        self.cur_tool = {}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.