Skip to content

Commit

Permalink
update cookbook (#106)
Browse files Browse the repository at this point in the history
  • Loading branch information
TengHu authored Dec 28, 2023
1 parent cabc243 commit 162b58b
Show file tree
Hide file tree
Showing 9 changed files with 276 additions and 534 deletions.
10 changes: 5 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -212,10 +212,10 @@ client.chat.completions.create(
actions=[a1, a2, a3], # First, LLM respond with either a1, a2 or a3, or text without action
# Define the orchestration logic for actions:
orch={
a1: [a2, a3], # If a1 is invoked, the next response will be either a2, a3 or a text response.
a2: a3, # If a2 is invoked, the next action will be a3
a3: [a4] # If a3 is invoked, the next response will be a4 or a text response.
a4: None # If a4 is invoked, the next response is guaranteed to be a text message
a1.name: [a2, a3], # If a1 is invoked, the next response will be either a2, a3 or a text response.
a2.name: a3, # If a2 is invoked, the next action will be a3
a3.name: [a4] # If a3 is invoked, the next response will be a4 or a text response.
a4.name: None # If a4 is invoked, the next response is guaranteed to be a text message
}
)
```
Expand Down Expand Up @@ -280,7 +280,7 @@ class FileAgent(AgentV0):

def __call__(self, text):
self.messages += [{"role": "user", "content":text}]
return self.llm.chat.completions.create(model="gpt-3.5-turbo", messages=self.messages, actions = [self.list_all_files_in_repo], orch = {self.handle_file: [self.list_all_files_in_repo, self.read_from_file]})
return self.llm.chat.completions.create(model="gpt-3.5-turbo", messages=self.messages, actions = [self.list_all_files_in_repo], orch = {self.handle_file.name: [self.list_all_files_in_repo, self.read_from_file]})
```

## Contributing
Expand Down
104 changes: 52 additions & 52 deletions docs/source/notebooks/cookbooks/azure_tutorial.ipynb

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion docs/source/notebooks/cookbooks/cookbook.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
"\n",
"# Examples\n",
"\n",
"- [ReAct/Planning Agent](planning_agent.ipynb)\n",
"- [Function Calling with Mistralai/Mistral-7B-Instruct-v0.1 through Anyscale Endpoints](anyscale.ipynb)\n"
]
},
Expand Down
84 changes: 63 additions & 21 deletions docs/source/notebooks/cookbooks/orchestration.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -14,32 +14,56 @@
},
{
"cell_type": "code",
"execution_count": 45,
"execution_count": 2,
"id": "5e7451c8-ddb3-498a-96b4-f166fafd783a",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import openai\n",
"import logging\n",
"from pydantic import BaseModel\n",
"from actionweaver.llms.openai.tools.chat import OpenAIChatCompletion\n",
"from actionweaver import action\n",
"from openai import OpenAI\n",
"\n",
"openai.api_key = os.getenv(\"OPENAI_API_KEY\")"
]
},
{
"cell_type": "markdown",
"id": "3a46ccb2-033d-404c-8213-1481e0da9196",
"metadata": {},
"source": [
"**Patch OpenAI client**"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "d3f2465e-527f-47f8-a0ae-6246343a09a4",
"metadata": {},
"outputs": [],
"source": [
"from actionweaver.llms import patch\n",
"\n",
"from actionweaver.actions.factories.pydantic_model_to_action import action_from_model\n",
"openai.api_key = os.getenv(\"OPENAI_API_KEY\")\n"
"openai_client = patch(OpenAI())"
]
},
{
"cell_type": "markdown",
"id": "e2fa587b-0637-4443-9353-33226e611df1",
"metadata": {},
"source": [
"**Define function you want model to invoke**"
]
},
{
"cell_type": "code",
"execution_count": 46,
"execution_count": 4,
"id": "da75c6a8-14ad-43a9-9378-fc166604f5d2",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from actionweaver import action\n",
"from typing import List\n",
"\n",
"\n",
"@action(name=\"FileHandler\")\n",
"def handle_file(instruction: str) -> str:\n",
" \"\"\"\n",
Expand Down Expand Up @@ -83,10 +107,7 @@
" print (f\"Read file from {file_path}\")\n",
" with open(file_path, 'r') as file:\n",
" content = file.read()\n",
" return f\"The file content: \\n{content}\"\n",
"\n",
"\n",
"chat = OpenAIChatCompletion(\"gpt-3.5-turbo\")\n"
" return f\"The file content: \\n{content}\"\n"
]
},
{
Expand All @@ -108,16 +129,29 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 12,
"id": "725aa18a-375f-4f1e-84fb-9155ec43f837",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Handling file...\n",
"List all files in ....\n"
]
}
],
"source": [
"chat.create([{\"role\": \"user\", \"content\": \"list all files, then read a file in current dir\"}], \n",
" actions = [handle_file], \n",
" orch = {\n",
" handle_file: [read_from_file, list_all_files_in_repo]\n",
" })"
"response = openai_client.chat.completions.create(\n",
" model=\"gpt-3.5-turbo\",\n",
" messages=[{\"role\": \"user\", \"content\": \"list all files\"}],\n",
" actions = [handle_file],\n",
" orch = {\n",
" handle_file.name: [read_from_file, list_all_files_in_repo]\n",
" },\n",
" stream=False, \n",
")"
]
},
{
Expand All @@ -127,6 +161,14 @@
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "d763422d-435f-4776-ba81-f3030fc50476",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
Loading

0 comments on commit 162b58b

Please sign in to comment.