diff --git a/bootstraprag/cli.py b/bootstraprag/cli.py
index 089add1..5be2a7e 100644
--- a/bootstraprag/cli.py
+++ b/bootstraprag/cli.py
@@ -3,12 +3,14 @@
 from pathlib import Path
 import os
 import zipfile
+from InquirerPy import inquirer
 
 
 @click.group()
 def cli():
     pass
 
+
 # used for downloading the project as zip.
 def create_zip(project_name):
     zip_path = shutil.make_archive(project_name, 'zip', project_name)
@@ -31,15 +33,18 @@ def create(project_name, framework, template, observability):
     elif framework == 'None':
         framework = 'qdrant'
         template_choices = ['simple-search']
-
-    template = click.prompt("Which template would you like to use?",
-                            type=click.Choice(template_choices)
-                            )
+    # Use InquirerPy to select template with arrow keys
+    template = inquirer.select(
+        message="Which template would you like to use?",
+        choices=template_choices,
+    ).execute()
     if framework == 'llamaindex' or framework == 'langchain' or framework == 'haystack':
         observability_choices = ['Yes', 'No']
-        observability = click.prompt("Do you wish to enable observability?",
-                                     type=click.Choice(observability_choices)
-                                     )
+        # Use InquirerPy to select observability with arrow keys
+        observability = inquirer.select(
+            message="Do you wish to enable observability?",
+            choices=observability_choices,
+        ).execute()
 
     click.echo(f'You have selected framework: {framework} and template: {template} and observability: {observability}')
     download_and_extract_template(project_name, framework, template, observability)
@@ -67,4 +72,4 @@ def download_and_extract_template(project_name, framework, template, observabili
 cli.add_command(create)
 
 if __name__ == "__main__":
-    cli()
\ No newline at end of file
+    cli()
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/__init__.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py
new file mode 100644
index 0000000..75d4647
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py
@@ -0,0 +1,44 @@
+from llama_agents import (
+    AgentService,
+    AgentOrchestrator,
+    ControlPlaneServer,
+    SimpleMessageQueue,
+)
+
+from llama_index.core.agent import ReActAgent
+from llama_index.core.tools import FunctionTool
+from llama_index.llms.openai import OpenAI
+
+
+# create an agent
+def get_the_secret_fact() -> str:
+    """Returns the secret fact."""
+    return "The secret fact is: A baby llama is called a 'Cria'."
+
+
+tool = FunctionTool.from_defaults(fn=get_the_secret_fact)
+
+agent1 = ReActAgent.from_tools([tool], llm=OpenAI())
+agent2 = ReActAgent.from_tools([], llm=OpenAI())
+
+# create our multi-agent framework components
+message_queue = SimpleMessageQueue(port=8000)
+control_plane = ControlPlaneServer(
+    message_queue=message_queue,
+    orchestrator=AgentOrchestrator(llm=OpenAI(model="gpt-4-turbo")),
+    port=8001,
+)
+agent_server_1 = AgentService(
+    agent=agent1,
+    message_queue=message_queue,
+    description="Useful for getting the secret fact.",
+    service_name="secret_fact_agent",
+    port=8002,
+)
+agent_server_2 = AgentService(
+    agent=agent2,
+    message_queue=message_queue,
+    description="Useful for getting random dumb facts.",
+    service_name="dumb_fact_agent",
+    port=8003,
+)
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py
new file mode 100644
index 0000000..38c7a13
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py
@@ -0,0 +1,16 @@
+from llama_agents import LocalLauncher
+import nest_asyncio
+from agents_core import agent_server_1, agent_server_2, control_plane, message_queue
+
+# needed for running in a notebook
+nest_asyncio.apply()
+
+# launch it
+launcher = LocalLauncher(
+    [agent_server_1, agent_server_2],
+    control_plane,
+    message_queue,
+)
+result = launcher.launch_single("What is the secret fact?")
+
+print(f"Result: {result}")
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt b/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt
new file mode 100644
index 0000000..d865b62
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt
@@ -0,0 +1,10 @@
+llama-agents==0.0.14
+llama-index==0.10.65
+llama-index-llms-openai==0.1.29
+llama-index-llms-ollama==0.2.2
+llama-index-embeddings-openai==0.1.11
+llama-index-embeddings-ollama==0.2.0
+llama-index-vector-stores-qdrant==0.2.16
+qdrant-client==1.11.0
+fastapi==0.112.1
+uvicorn==0.30.6
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/simple_rag/requirements.txt b/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
index c48d0e3..9b7f3f8 100644
--- a/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
+++ b/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
@@ -1,10 +1,10 @@
 python-dotenv==1.0.1
-llama-index==0.10.58
-llama-index-llms-openai==0.1.27
-llama-index-llms-ollama==0.2.0
+llama-index==0.10.65
+llama-index-llms-openai==0.1.29
+llama-index-llms-ollama==0.2.2
 llama-index-embeddings-openai==0.1.11
-llama-index-embeddings-ollama==0.1.2
-llama-index-vector-stores-qdrant==0.2.14
-qdrant-client==1.10.1
-fastapi==0.112.0
-uvicorn==0.30.5
\ No newline at end of file
+llama-index-embeddings-ollama==0.2.0
+llama-index-vector-stores-qdrant==0.2.16
+qdrant-client==1.11.0
+fastapi==0.112.1
+uvicorn==0.30.6
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 27a61d8..b7d4e51 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
 click==8.1.7
-python-dotenv==1.0.1
\ No newline at end of file
+python-dotenv==1.0.1
+inquirerpy==0.3.4
\ No newline at end of file
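
Note: the sketch below is a minimal, standalone illustration of the InquirerPy prompt pattern the CLI switches to in cli.py (arrow-key selection via inquirer.select(...).execute() instead of click.prompt with click.Choice). The choice list and message are illustrative only; the real CLI derives them from the selected framework.

# Standalone sketch of the InquirerPy selection flow adopted in cli.py.
# Assumes the inquirerpy==0.3.4 dependency pinned above is installed.
from InquirerPy import inquirer

# Illustrative choices; cli.py builds these per framework (e.g. ['simple-search']).
template_choices = ['simple-search']

# Renders an arrow-key selectable list in the terminal and returns the chosen value.
template = inquirer.select(
    message="Which template would you like to use?",
    choices=template_choices,
).execute()

print(f"Selected template: {template}")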