# AgentRouter

Route requests to specialized agents based on intent classification. Supports multiple agent types with automatic fallback handling.
## Installation

    pip install -r requirements.txt

## Quick Start

    from agentrouter import Router, Agent

    router = Router()

    @router.agent("code")
    class CodeAgent(Agent):
        def handle(self, message):
            return "I can help with code..."

    @router.agent("search")
    class SearchAgent(Agent):
        def handle(self, message):
            return "Searching for..."

    response = router.route("Write a Python function")

## Features

- Intent-based routing using LLM classification
- Agent registry with priority ordering
- Fallback handling for unmatched intents
- Conversation handoff between agents
- Async support
## API

### Router

    router = Router(
        classifier: Optional[Callable] = None,
        fallback_agent: Optional[str] = None
    )

### Methods

- `agent(name: str, intents: List[str])` — Decorator to register an agent
- `route(message: str) -> Response` — Route a message to the appropriate agent
- `handoff(from_agent: str, to_agent: str, context: Dict)` — Transfer a conversation between agents
### Defining an Agent

    class MyAgent(Agent):
        intents = ["code", "programming"]

        def handle(self, message: str, context: Dict) -> str:
            # Process message
            return response

## Configuration

    router = Router(
        classifier=custom_classifier,
        fallback_agent="general"
    )

    # Set intent confidence threshold
    router.confidence_threshold = 0.7

## Examples

    router = Router()
    @router.agent("code", intents=["code", "programming", "debug"])
    class CodeAgent(Agent):
        def handle(self, message, context):
            return self.llm.complete(f"Help with code: {message}")

    @router.agent("search", intents=["search", "find", "lookup"])
    class SearchAgent(Agent):
        def handle(self, message, context):
            results = self.search_api.query(message)
            return format_results(results)

    @router.agent("general", fallback=True)
    class GeneralAgent(Agent):
        def handle(self, message, context):
            return self.llm.complete(message)

### Agent Handoff

    @router.agent("support")
    class SupportAgent(Agent):
        def handle(self, message, context):
            if needs_technical_help(message):
                return router.handoff("support", "code", context)
            return "How can I help?"

## License

MIT