Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 17 additions & 10 deletions agent_chat/src/agents/agent.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from datetime import timedelta

from pydantic import BaseModel
from restack_ai.agent import agent, import_functions, log
from restack_ai.agent import agent, import_functions, log, AgentError

with import_functions():
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
Expand All @@ -23,15 +23,22 @@ def __init__(self) -> None:

@agent.event
async def messages(self, messages_event: MessagesEvent) -> list[Message]:
    """Handle a batch of incoming chat messages.

    Appends the new user messages to the conversation history, asks the
    LLM for an assistant reply via an agent step, appends that reply, and
    returns the full updated history.

    Args:
        messages_event: Event carrying the new messages to process.

    Returns:
        The complete conversation history including the assistant reply.

    Raises:
        AgentError: If logging, the LLM step, or history bookkeeping fails.
    """
    try:
        log.info(f"Received messages: {messages_event.messages}")
        self.messages.extend(messages_event.messages)

        log.info(f"Calling llm_chat with messages: {self.messages}")
        # 120s ceiling: LLM calls can be slow, but we do not wait forever.
        assistant_message = await agent.step(
            function=llm_chat,
            function_input=LlmChatInput(messages=self.messages),
            start_to_close_timeout=timedelta(seconds=120),
        )

        self.messages.append(assistant_message)
        return self.messages
    except Exception as e:
        log.error(f"Error in messages: {e}")
        # Chain the original exception so the traceback is preserved
        # for debugging instead of being replaced by the wrapper.
        raise AgentError(f"Error in messages: {e}") from e

@agent.event
async def end(self, end: EndEvent) -> EndEvent:
Expand Down
1 change: 1 addition & 0 deletions agent_telephony/twilio/livekit-trunk-setup/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
.venv
.env
uv.lock
inbound_trunk.json
8 changes: 0 additions & 8 deletions agent_telephony/twilio/livekit-trunk-setup/inbound_trunk.json

This file was deleted.

9 changes: 8 additions & 1 deletion agent_video/src/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

with import_functions():
from src.functions.llm_chat import LlmChatInput, Message, llm_chat

from src.functions.context_docs import context_docs

class MessagesEvent(BaseModel):
messages: list[Message]
Expand Down Expand Up @@ -42,4 +42,11 @@ async def end(self, end: EndEvent) -> EndEvent:

@agent.run
async def run(self) -> None:
    """Main agent loop: seed the system prompt, then wait for the end event.

    Fetches the documentation once at startup, installs it as the system
    message so the LLM can answer questions about it, and then blocks
    until ``self.end`` is set by the ``end`` event handler.
    """
    # Fetch the docs once up front; they are baked into the system prompt.
    docs = await agent.step(function=context_docs)

    system_prompt = f"""
    You are an interactive video assistant, your answers will be used in text to speech so try to keep answers short and concise so that interaction is seamless.
    You can answer questions about the following documentation:
    {docs}
    """

    self.messages.append(Message(role="system", content=system_prompt))

    # Park the run loop until the end event flips the flag.
    await agent.condition(lambda: self.end)
25 changes: 25 additions & 0 deletions agent_video/src/functions/context_docs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
from restack_ai.function import function, log
import aiohttp


async def fetch_content_from_url(url: str) -> str:
    """Download *url* and return its body as text.

    Args:
        url: The URL to fetch.

    Returns:
        The response body decoded as text.

    Raises:
        Exception: If the server responds with a non-200 status.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            # Guard clause: anything other than 200 is treated as failure.
            if response.status != 200:
                log.error("Failed to fetch content", status=response.status)
                raise Exception(f"Failed to fetch content: {response.status}")
            return await response.text()


@function.defn()
async def context_docs() -> str:
    """Fetch the Restack documentation used as LLM context.

    Returns:
        The full documentation text retrieved from the docs URL.

    Raises:
        Exception: Propagated from the underlying HTTP fetch on failure.
    """
    try:
        docs_content = await fetch_content_from_url("https://docs.restack.io/llms-full.txt")
        log.info("Fetched content from URL", content=len(docs_content))

        return docs_content

    except Exception as e:
        # Fixed copy-paste bug: this log previously claimed "llm_chat
        # function failed", which misattributed errors to another function.
        log.error("context_docs function failed", error=str(e))
        raise
2 changes: 2 additions & 0 deletions agent_video/src/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from src.functions.llm_chat import llm_chat
from src.functions.pipeline import pipecat_pipeline
from src.workflows.room import RoomWorkflow
from src.functions.context_docs import context_docs


async def main() -> None:
Expand All @@ -19,6 +20,7 @@ async def main() -> None:
functions=[
llm_chat,
pipecat_pipeline,
context_docs,
],
)

Expand Down