-
Notifications
You must be signed in to change notification settings - Fork 7.9k
Closed
Description
I'm trying to create a group chat in which functions, defined as LangChain tools, are passed to a UserProxyAgent for execution. The issue is that every function call fails with a "function not found" error whenever the UserProxyAgent tries to execute it. I also tried plain function definitions instead of LangChain tools, and it produces the same error.
Here is the code
# Load the model/endpoint definitions from the local OAI_CONFIG_LIST.json.
config_list = autogen.config_list_from_json(
env_or_file="OAI_CONFIG_LIST.json",
file_location=".",
)
# Shared LLM settings inherited (via .copy()) by every agent below.
# NOTE(review): "request_timeout" was renamed to "timeout" in newer
# autogen releases (pyautogen >= 0.2) — confirm against the installed version.
llm_config = {
# "seed": 42,
"temperature": 0,
"config_list": config_list,
"request_timeout": 120,
}
# ==============================================================================
# Functions
#==============================================================================
# ------------------------------------------------------
# Pydantic argument schema for WebSearchTool: a single free-text query string.
class WebSearchToolInput(BaseModel):
query: str = Field(description = "Query to search for on Google")
class WebSearchTool(BaseTool):
    """LangChain tool that searches Google for a query string.

    The name/description/args_schema attributes are what
    `generate_oai_func` turns into an OpenAI function spec.
    """

    name = "web_search"
    description = "A function used to search for a query on Google"
    args_schema: Type[BaseModel] = WebSearchToolInput
    # Field names of the schema, used when building the function spec.
    required_args: List = [field for field in args_schema.__annotations__.keys()]

    def _run(self, query: str):
        """Execute the search for *query* and return the results.

        FIX: the pasted snippet had a comment-only body ("# logic here"),
        which is a SyntaxError; an explicit placeholder makes the class
        importable while keeping the omission visible.
        """
        # logic here
        raise NotImplementedError("web_search logic omitted from this snippet")
# ------------------------------------------------------
# Pydantic argument schema for ScrapeTool: the URL to fetch and scrape.
class ScrapeToolInput(BaseModel):
url: str = Field(description = "A website URL to scrape for data")
class ScrapeTool(BaseTool):
    """LangChain tool that scrapes a website for data.

    Exposed to agents through `generate_oai_func`, executed by the
    UserProxyAgent via its function_map.
    """

    name = "scrape"
    description = "A function to scrape a website for data"
    args_schema: Type[BaseModel] = ScrapeToolInput
    # Field names of the schema, used when building the function spec.
    required_args: List = [field for field in args_schema.__annotations__.keys()]

    def _run(self, url: str):
        """Scrape *url* and return the extracted data.

        FIX: the pasted snippet had a comment-only body ("# logic here"),
        which is a SyntaxError; raise explicitly instead.
        """
        # logic here
        raise NotImplementedError("scrape logic omitted from this snippet")
# ------------------------------------------------------
# Pydantic argument schema for KeywordResearchTool: a list of seed keywords.
class KeywordResearchToolInput(BaseModel):
    # FIX: annotation was List["str"] — an unnecessary quoted forward
    # reference to a builtin; List[str] is the plain, equivalent form.
    keywords: List[str] = Field(description = "List of Keywords to search for SEO Optimization")
class KeywordResearchTool(BaseTool):
    """LangChain tool that researches SEO-optimization keywords.

    Exposed to agents through `generate_oai_func`, executed by the
    UserProxyAgent via its function_map.
    """

    name = "keyword_research"
    description = "A function used to search for SEO Optimization related keywords"
    args_schema: Type[BaseModel] = KeywordResearchToolInput
    # Field names of the schema, used when building the function spec.
    required_args: List = [field for field in args_schema.__annotations__.keys()]

    def _run(self, keywords: List[str]) -> str:
        """Research *keywords* and return a report string.

        FIX: the pasted snippet had a comment-only body ("# logic here"),
        which is a SyntaxError; raise explicitly instead.
        """
        # logic here
        raise NotImplementedError("keyword_research logic omitted from this snippet")
# ==============================================================================
# Agents
#==============================================================================
# --- Market Researcher agent: gets the web-search and scrape functions. ---
def _market_researcher_is_done(msg):
    """True-ish when the message's content ends with TERMINATE."""
    content = msg.get("content", "")
    return content and content.rstrip().endswith("TERMINATE")

market_researcher_llm_config = llm_config.copy()
market_researcher_llm_config["functions"] = [
    generate_oai_func(tool) for tool in (WebSearchTool(), ScrapeTool())
]
Market_Researcher = autogen.AssistantAgent(
    name="Market_Researcher",
    system_message=""" A Market Researcher. Reply "TERMINATE" in the end when everything is done.
""",
    llm_config=market_researcher_llm_config,
    is_termination_msg=_market_researcher_is_done,
)
# ------------------------------------------------------
# --- SEO Specialist agent: gets the keyword-research function. ---
def _seo_specialist_is_done(msg):
    """True-ish when the message's content ends with TERMINATE."""
    content = msg.get("content", "")
    return content and content.rstrip().endswith("TERMINATE")

seo_specialist_llm_config = llm_config.copy()
seo_specialist_llm_config["functions"] = [
    generate_oai_func(tool) for tool in (KeywordResearchTool(),)
]
SEO_Specialist = autogen.AssistantAgent(
    name="SEO_Specialist",
    system_message="""An SEO Specialist. Reply "TERMINATE" in the end when everything is done.
""",
    llm_config=seo_specialist_llm_config,
    is_termination_msg=_seo_specialist_is_done,
)
# --------------------------- Data Analyst ---------------------------
# --- Data Analyst agent: no functions attached, plain LLM config. ---
def _data_analyst_is_done(msg):
    """True-ish when the message's content ends with TERMINATE."""
    content = msg.get("content", "")
    return content and content.rstrip().endswith("TERMINATE")

data_analyst_llm_config = llm_config.copy()
Data_Analyst = autogen.AssistantAgent(
    name="Data_Analyst",
    system_message="""A Data Analyst. Reply "TERMINATE" in the end when everything is done.
""",
    llm_config=data_analyst_llm_config,
    is_termination_msg=_data_analyst_is_done,
)
# ------------------------------------------------------
# research_assistant_llm_config = llm_config.copy()
# --- Research Assistant: the executor agent that actually runs the tools. ---
Research_Assistant = autogen.UserProxyAgent(
    name="Research_Assistant",
    system_message='''Assistant for the Market Research team.''',
    is_termination_msg=lambda x: x.get("content", "") and x.get("content", "").rstrip().endswith("TERMINATE"),
    # llm_config=research_assistant_llm_config,
    code_execution_config={"work_dir": "coding"},
    human_input_mode = "TERMINATE",
    max_consecutive_auto_reply=10,
    # Map each advertised function name onto the callable that executes it.
    function_map={
        # BUG FIX: was `WebResearchTool()` — no such class exists (it is
        # defined as WebSearchTool above), so building the map raised a
        # NameError and no function could ever be found/executed.
        "web_search": WebSearchTool()._run,
        "scrape": ScrapeTool()._run,
        "keyword_research": KeywordResearchTool()._run,
    }
)
# ------------------------------------------------------
# Pydantic argument schema for MarketResearchTeamTool: the task instructions.
class MarketResearchTeamToolInput(BaseModel):
instructions: str = Field(description = "Detailed instructions for the Market Research Team")
class MarketResearchTeamTool(BaseTool):
    """Tool that hands a task to the market-research group chat and saves the transcript."""

    name = "market_research_team"
    # BUG FIX: description was "" — the LLM relies on this text to decide
    # when to call the tool, so an empty description makes it unusable.
    description = "Delegate detailed instructions to the Market Research Team group chat"
    args_schema: Type[BaseModel] = MarketResearchTeamToolInput
    # Field names of the schema, used when building the function spec.
    required_args: List = [field for field in args_schema.__annotations__.keys()]

    def _run(self, instructions: str):
        """Run a group chat on *instructions* and persist its message history.

        Side effects: starts an autogen group chat (LLM calls) and writes a
        timestamped JSON transcript under chat_history/.
        """
        groupchat = autogen.GroupChat(
            agents=[Market_Researcher, SEO_Specialist, Data_Analyst, Research_Assistant],
            messages=[],
            max_round=10,
        )
        # Give the chat manager every function spec so it can route any call.
        new_llm_config = llm_config.copy()
        new_llm_config["functions"] = [
            generate_oai_func(WebSearchTool()),
            generate_oai_func(ScrapeTool()),
            generate_oai_func(KeywordResearchTool()),
        ]
        manager = autogen.GroupChatManager(
            groupchat=groupchat,
            name="Market_Research_Team_Chat_Manager",
            llm_config=new_llm_config,
        )
        Research_Assistant.initiate_chat(manager, message=instructions)
        # ROBUSTNESS FIX: ensure the output directory exists before writing,
        # otherwise open() raises FileNotFoundError on a fresh checkout.
        import os
        os.makedirs("chat_history", exist_ok=True)
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        with open(f"chat_history/Market_Research_Team_chat_{timestamp}.json", "w") as json_file:
            json.dump(groupchat.messages, json_file, indent=2)
# Entry point: kick off the whole pipeline with a sample research request.
MarketResearchTeamTool()._run(instructions="Do a quick research about AI?")
Siafu
Metadata
Metadata
Assignees
Labels
No labels