From a478d94c0e855f0d284c436144406e890de295c5 Mon Sep 17 00:00:00 2001
From: pavanmantha
Date: Mon, 19 Aug 2024 12:13:19 +0530
Subject: [PATCH 1/3] new feature of llama-agents

---
 .../llama_agents_simpleq/__init__.py        |  0
 .../llama_agents_simpleq/agents_core.py     | 44 +++++++++++++++++++
 .../llamaindex/llama_agents_simpleq/main.py | 16 +++++++
 .../llama_agents_simpleq/requirements.txt   | 10 +++++
 .../llamaindex/simple_rag/requirements.txt  | 16 +++----
 5 files changed, 78 insertions(+), 8 deletions(-)
 create mode 100644 bootstraprag/templates/llamaindex/llama_agents_simpleq/__init__.py
 create mode 100644 bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py
 create mode 100644 bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py
 create mode 100644 bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt

diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/__init__.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py
new file mode 100644
index 0000000..75d4647
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/agents_core.py
@@ -0,0 +1,44 @@
+from llama_agents import (
+    AgentService,
+    AgentOrchestrator,
+    ControlPlaneServer,
+    SimpleMessageQueue,
+)
+
+from llama_index.core.agent import ReActAgent
+from llama_index.core.tools import FunctionTool
+from llama_index.llms.openai import OpenAI
+
+
+# create an agent
+def get_the_secret_fact() -> str:
+    """Returns the secret fact."""
+    return "The secret fact is: A baby llama is called a 'Cria'."
+
+
+tool = FunctionTool.from_defaults(fn=get_the_secret_fact)
+
+agent1 = ReActAgent.from_tools([tool], llm=OpenAI())
+agent2 = ReActAgent.from_tools([], llm=OpenAI())
+
+# create our multi-agent framework components
+message_queue = SimpleMessageQueue(port=8000)
+control_plane = ControlPlaneServer(
+    message_queue=message_queue,
+    orchestrator=AgentOrchestrator(llm=OpenAI(model="gpt-4-turbo")),
+    port=8001,
+)
+agent_server_1 = AgentService(
+    agent=agent1,
+    message_queue=message_queue,
+    description="Useful for getting the secret fact.",
+    service_name="secret_fact_agent",
+    port=8002,
+)
+agent_server_2 = AgentService(
+    agent=agent2,
+    message_queue=message_queue,
+    description="Useful for getting random dumb facts.",
+    service_name="dumb_fact_agent",
+    port=8003,
+)
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py b/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py
new file mode 100644
index 0000000..38c7a13
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/main.py
@@ -0,0 +1,16 @@
+from llama_agents import LocalLauncher
+import nest_asyncio
+from agents_core import agent_server_1, agent_server_2, control_plane, message_queue
+
+# needed for running in a notebook
+nest_asyncio.apply()
+
+# launch it
+launcher = LocalLauncher(
+    [agent_server_1, agent_server_2],
+    control_plane,
+    message_queue,
+)
+result = launcher.launch_single("What is the secret fact?")
+
+print(f"Result: {result}")
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt b/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt
new file mode 100644
index 0000000..d865b62
--- /dev/null
+++ b/bootstraprag/templates/llamaindex/llama_agents_simpleq/requirements.txt
@@ -0,0 +1,10 @@
+llama-agents==0.0.14
+llama-index==0.10.65
+llama-index-llms-openai==0.1.29
+llama-index-llms-ollama==0.2.2
+llama-index-embeddings-openai==0.1.11
+llama-index-embeddings-ollama==0.2.0
+llama-index-vector-stores-qdrant==0.2.16
+qdrant-client==1.11.0
+fastapi==0.112.1
+uvicorn==0.30.6
\ No newline at end of file
diff --git a/bootstraprag/templates/llamaindex/simple_rag/requirements.txt b/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
index c48d0e3..9b7f3f8 100644
--- a/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
+++ b/bootstraprag/templates/llamaindex/simple_rag/requirements.txt
@@ -1,10 +1,10 @@
 python-dotenv==1.0.1
-llama-index==0.10.58
-llama-index-llms-openai==0.1.27
-llama-index-llms-ollama==0.2.0
+llama-index==0.10.65
+llama-index-llms-openai==0.1.29
+llama-index-llms-ollama==0.2.2
 llama-index-embeddings-openai==0.1.11
-llama-index-embeddings-ollama==0.1.2
-llama-index-vector-stores-qdrant==0.2.14
-qdrant-client==1.10.1
-fastapi==0.112.0
-uvicorn==0.30.5
\ No newline at end of file
+llama-index-embeddings-ollama==0.2.0
+llama-index-vector-stores-qdrant==0.2.16
+qdrant-client==1.11.0
+fastapi==0.112.1
+uvicorn==0.30.6
\ No newline at end of file

From 4e017c0a67ba92de6a58877faf1b9cbbcbcb5402 Mon Sep 17 00:00:00 2001
From: pavanmantha
Date: Tue, 20 Aug 2024 23:02:22 +0530
Subject: [PATCH 2/3] implemented inquirerpy for easy selection of options in cli

---
 bootstraprag/cli.py | 21 +++++++++++++--------
 requirements.txt    |  3 ++-
 2 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/bootstraprag/cli.py b/bootstraprag/cli.py
index 089add1..5be2a7e 100644
--- a/bootstraprag/cli.py
+++ b/bootstraprag/cli.py
@@ -3,12 +3,14 @@
 from pathlib import Path
 import os
 import zipfile
+from InquirerPy import inquirer
 
 
 @click.group()
 def cli():
     pass
+
+
 # used for downloading the project as zip.
 def create_zip(project_name):
     zip_path = shutil.make_archive(project_name, 'zip', project_name)
@@ -31,15 +33,18 @@ def create(project_name, framework, template, observability):
     elif framework == 'None':
         framework = 'qdrant'
         template_choices = ['simple-search']
-
-    template = click.prompt("Which template would you like to use?",
-                            type=click.Choice(template_choices)
-                            )
+    # Use InquirerPy to select template with arrow keys
+    template = inquirer.select(
+        message="Which template would you like to use?",
+        choices=template_choices,
+    ).execute()
     if framework == 'llamaindex' or framework == 'langchain' or framework == 'haystack':
         observability_choices = ['Yes', 'No']
-        observability = click.prompt("Do you wish to enable observability?",
-                                     type=click.Choice(observability_choices)
-                                     )
+        # Use InquirerPy to select observability with arrow keys
+        observability = inquirer.select(
+            message="Do you wish to enable observability?",
+            choices=observability_choices,
+        ).execute()
     click.echo(f'You have selected framework: {framework} and template: {template} and observability: {observability}')
     download_and_extract_template(project_name, framework, template, observability)
 
@@ -67,4 +72,4 @@
 cli.add_command(create)
 
 if __name__ == "__main__":
-    cli()
\ No newline at end of file
+    cli()
diff --git a/requirements.txt b/requirements.txt
index 27a61d8..b7d4e51 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
 click==8.1.7
-python-dotenv==1.0.1
\ No newline at end of file
+python-dotenv==1.0.1
+inquirerpy==0.3.4
\ No newline at end of file

From 938f206a2cc3d6a4656e2baedb716fb0da5f9222 Mon Sep 17 00:00:00 2001
From: pavanmantha
Date: Tue, 20 Aug 2024 23:59:49 +0530
Subject: [PATCH 3/3] made inquirerpy easier to use

---
 bootstraprag/cli.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/bootstraprag/cli.py b/bootstraprag/cli.py
index 5be2a7e..7acb8a8 100644
--- a/bootstraprag/cli.py
+++ b/bootstraprag/cli.py
@@ -19,15 +19,17 @@ def create_zip(project_name):
 
 @click.command()
 @click.argument('project_name')
-@click.option('--framework', type=click.Choice(['llamaindex', 'None']),
-              prompt="Which technology would you like to use ('None' will make you to use qdrant direct search)?",
-              default='', required=False)
+@click.option('--framework', type=click.Choice([]),prompt=False)
 @click.option('--template', type=click.Choice([]), prompt=False)
 @click.option('--observability', type=click.Choice([]), prompt=False)
 def create(project_name, framework, template, observability):
     template_choices = []
     observability_choices = []
-
+    framework_choices = ['llamaindex', 'None']
+    framework = inquirer.select(
+        message="Which technology would you like to use ('None' will make you to use qdrant direct search)?",
+        choices=framework_choices
+    ).execute()
     if framework == 'llamaindex' or framework == 'langchain' or framework == 'haystack':
         template_choices = ['simple-rag', 'rag-with-react', 'rag-with-hyde', 'rag-with-flare']
     elif framework == 'None':