Merge pull request #96 from CambioML/jojo-branch
update openai, pydantic, and chromadb versions and examples to work with uniflow
jojortz committed Dec 23, 2023
2 parents 1ef8276 + 8dece7b commit 1dd54bc
Showing 60 changed files with 2,396 additions and 2,157 deletions.
10 changes: 2 additions & 8 deletions docker/pykoi-cpu-custom/app.py
@@ -4,13 +4,9 @@
##########################################################
# Creating an OpenAI model (requires an OpenAI API key) #
##########################################################
# enter openai api key here
api_key = "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"

# Creating an OpenAI model
model = pykoi.ModelFactory.create_model(
model_source="openai",
api_key=api_key)
model = pykoi.ModelFactory.create_model(model_source="openai")

#####################################
# Creating a chatbot with the model #
@@ -25,9 +21,7 @@
###########################################################
# Create the application
# app = pykoi.Application(debug=False, share=True)
app = pykoi.Application(
debug=False,
share=True)
app = pykoi.Application(debug=False, share=True)
app.add_component(chatbot)
app.add_component(dashboard)
app.run()
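
With the hard-coded `api_key` removed from `docker/pykoi-cpu-custom/app.py`, the OpenAI key presumably has to reach the container through the environment instead. A minimal sketch of that pattern, assuming pykoi's OpenAI wrapper (or the openai client underneath it) reads `OPENAI_API_KEY` from the process environment, as the old `demo_launch_app_cpu_openai.py` code suggests:

```python
# Sketch only: fail fast if the key is missing instead of erroring inside the client.
# Assumption: pykoi's OpenAI model picks up OPENAI_API_KEY from the environment.
import os

import pykoi

if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("Set OPENAI_API_KEY before launching this example.")

model = pykoi.ModelFactory.create_model(model_source="openai")
```

In the Docker case this typically means supplying the key at run time, e.g. `docker run -e OPENAI_API_KEY=... <image>`, rather than baking it into the image.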
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -79,5 +79,5 @@
""",
"class": "",
},
]
],
}
85 changes: 70 additions & 15 deletions example/chatbot/chatbot_in_jupyter.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"id": "61b49dc2",
"metadata": {},
"outputs": [],
@@ -21,36 +21,74 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"id": "6a907bb3",
"metadata": {},
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from pykoi import Application\n",
"from pykoi.chat import ModelFactory\n",
"from pykoi.chat import QuestionAnswerDatabase\n",
"from pykoi.component import Chatbot"
"from pykoi.component import Chatbot\n",
"from dotenv import load_dotenv\n",
"load_dotenv()"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 9,
"id": "15c2004b",
"metadata": {},
"outputs": [],
"source": [
"api_key = \"\"\n",
"\n",
"# Creating an OpenAI model\n",
"model = ModelFactory.create_model(model_source=\"openai\", api_key=api_key)"
"model = ModelFactory.create_model(model_source=\"openai\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Add `nest_asyncio` \n",
"Add `nest_asyncio` to avoid error. Since we're running another interface inside a Jupyter notebook where an asyncio event loop is already running, we'll encounter the error. (since The uvicorn.run() function uses asyncio.run(), which isn't compatible with a running event loop.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0c07c943",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "0c07c943",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Table contents after creating table:\n",
"ID: 1, Question: Who is Sam altman, Answer: He is the president of YC, Vote Status: n/a, Timestamp: 2023-12-20 13:37:43.095750\n"
]
}
],
"source": [
"database = QuestionAnswerDatabase(debug=True)\n",
"chatbot = Chatbot(model=model, feedback=\"vote\")\n",
@@ -61,14 +99,31 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 12,
"id": "ae7bbef3",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO: Started server process [40457]\n",
"INFO: Waiting for application startup.\n",
"INFO: Application startup complete.\n",
"INFO: Uvicorn running on http://0.0.0.0:5000 (Press CTRL+C to quit)\n"
]
}
],
"source": [
"# import nest_asyncio\n",
"app.display()"
"app.run()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
@@ -87,7 +142,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
"version": "3.10.13"
}
},
"nbformat": 4,
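
Taken together, the notebook edits above switch to loading the key with `python-dotenv` and patching the event loop with `nest_asyncio` before the server starts. A condensed sketch of the full flow follows; the `Application` arguments are not visible in this diff, so the `share=False` setting is an assumption, and a `.env` file containing `OPENAI_API_KEY=...` is assumed to sit next to the notebook.

```python
# Condensed sketch of the updated chatbot_in_jupyter flow (assumptions noted above).
import nest_asyncio
from dotenv import load_dotenv

from pykoi import Application
from pykoi.chat import ModelFactory, QuestionAnswerDatabase
from pykoi.component import Chatbot

load_dotenv()           # exports OPENAI_API_KEY from .env into the environment
nest_asyncio.apply()    # lets uvicorn's event loop nest inside Jupyter's loop

model = ModelFactory.create_model(model_source="openai")
database = QuestionAnswerDatabase(debug=True)   # debug=True prints the current table
chatbot = Chatbot(model=model, feedback="vote")

app = Application(debug=False, share=False)     # share flag assumed, not from the diff
app.add_component(chatbot)
app.run()               # the captured log shows it serving on http://0.0.0.0:5000
```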
212 changes: 50 additions & 162 deletions example/chatbot/demo_launch_app_cpu_openai.ipynb

Large diffs are not rendered by default.

9 changes: 2 additions & 7 deletions example/chatbot/demo_launch_app_cpu_openai.py
@@ -15,25 +15,20 @@
python -m example.chatbot.demo_launch_app_cpu_openai
```
"""
import os

from dotenv import load_dotenv

from pykoi import Application
from pykoi.chat import ModelFactory
from pykoi.chat import QuestionAnswerDatabase
from pykoi.chat import ModelFactory, QuestionAnswerDatabase
from pykoi.component import Chatbot, Dashboard

##########################################################
# Creating an OpenAI model (requires an OpenAI API key) #
##########################################################
load_dotenv()
api_key = os.getenv("OPENAI_API_KEY")

# Creating an OpenAI model
model = ModelFactory.create_model(
model_source="openai",
api_key=api_key)
model = ModelFactory.create_model(model_source="openai")

#####################################
# Creating a chatbot with the model #
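
Because `create_model()` no longer accepts an `api_key` argument here, `load_dotenv()` is now the only key-loading step in this script. A hedged sketch of the two usual ways to supply the key; the `.env` layout is a `python-dotenv` convention, not something shown in this diff:

```python
# Sketch: provide OPENAI_API_KEY either via a .env file or the shell environment.
import os

from dotenv import load_dotenv

# Option 1: a .env file in the working directory containing a line like
#   OPENAI_API_KEY=sk-...
load_dotenv()

# Option 2: export the variable before running the module, e.g.
#   OPENAI_API_KEY=sk-... python -m example.chatbot.demo_launch_app_cpu_openai

if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY is not set; the OpenAI model cannot be created.")
```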
77 changes: 64 additions & 13 deletions example/chatbot/demo_launch_app_gpu_huggingface.ipynb
@@ -21,7 +21,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -44,7 +44,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -66,7 +66,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -75,12 +75,45 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 4,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/opt/conda/envs/pykoi/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
" from .autonotebook import tqdm as notebook_tqdm\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[HuggingfaceModel] loading model...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"WARNING: You are currently loading Falcon using legacy code contained in the model repository. Falcon has now been fully ported into the Hugging Face transformers library. For the most up-to-date and high-performance version of the Falcon model code, please update to the latest version of transformers and then load the model without the trust_remote_code=True argument.\n",
"\n",
"Loading checkpoint shards: 100%|██████████| 2/2 [00:36<00:00, 18.18s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[HuggingfaceModel] loading tokenizer...\n"
]
}
],
"source": [
"model = ModelFactory.create_model(\n",
" model_source=\"huggingface\", \n",
" model_source=\"huggingface\",\n",
" pretrained_model_name_or_path=\"tiiuae/falcon-7b\",\n",
" trust_remote_code=True, ## TODO: set as default\n",
" load_in_8bit=True\n",
@@ -89,7 +122,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -115,7 +148,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
@@ -132,7 +165,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
@@ -143,9 +176,27 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO: Started server process [7578]\n",
"INFO: Waiting for application startup.\n",
"INFO: Application startup complete.\n",
"INFO: Uvicorn running on http://0.0.0.0:5000 (Press CTRL+C to quit)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Public URL: https://a63d9b47dea54a.lhr.life\n"
]
}
],
"source": [
"app = Application(debug=False, share=True)\n",
"app.add_component(chatbot)\n",
@@ -208,7 +259,7 @@
"kernelspec": {
"display_name": "pykoi",
"language": "python",
"name": "0731a"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -220,7 +271,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
"version": "3.10.13"
}
},
"nbformat": 4,
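
The captured warning notes that Falcon is now natively supported in transformers, so the `trust_remote_code=True` argument (already flagged with a TODO in the cell above) should eventually be droppable. A sketch of what that call could look like; whether pykoi's `ModelFactory` already works without the flag is not shown in this diff, so treat this as an assumption:

```python
# Sketch only: with a transformers release that ports Falcon natively,
# the trust_remote_code flag may be omitted. Other arguments mirror the cell above.
from pykoi.chat import ModelFactory

model = ModelFactory.create_model(
    model_source="huggingface",
    pretrained_model_name_or_path="tiiuae/falcon-7b",
    load_in_8bit=True,
)
```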
4 changes: 1 addition & 3 deletions example/chatbot/demo_launch_app_gpu_huggingface.py
@@ -12,11 +12,9 @@
```
"""
from pykoi import Application
from pykoi.chat import ModelFactory
from pykoi.chat import QuestionAnswerDatabase
from pykoi.chat import ModelFactory, QuestionAnswerDatabase
from pykoi.component import Chatbot, Dashboard


###################################################################################
# Creating a Huggingface model tiiuae/falcon-7b (EC2 g5.4xlarge with 100GB space) #
###################################################################################
4 changes: 1 addition & 3 deletions example/chatbot/demo_launch_app_gpu_huggingface_peft.py
@@ -13,11 +13,9 @@
"""

from pykoi import Application
from pykoi.chat import ModelFactory
from pykoi.chat import QuestionAnswerDatabase
from pykoi.chat import ModelFactory, QuestionAnswerDatabase
from pykoi.component import Chatbot, Dashboard


###################################################################################
# Creating a Huggingface model tiiuae/falcon-7b (EC2 g5.4xlarge with 100GB space) #
###################################################################################
