Commit

chore: refine litellm
iuiaoin committed Aug 9, 2023
1 parent 59e0924 commit 473e273
Showing 3 changed files with 35 additions and 18 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -5,4 +5,5 @@ config.json
assets/*.png
assets/*.mp4
.ruff_cache
plugins/**/
plugins/**/
litellm_uuid.txt
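
Side note on the new ignore entry: litellm builds from this period appear to generate a litellm_uuid.txt file (an anonymous install identifier) in the working directory at runtime, so the new entry keeps that generated file out of version control.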
30 changes: 20 additions & 10 deletions bot/bot.py
@@ -1,26 +1,36 @@
import litellm
# import litellm
from common.context import Context
from config import conf
from common.singleton import singleton
from common.reply import Reply



@singleton
class Bot:
def __init__(self):
use_azure_chatgpt = conf().get("use_azure_chatgpt", False)
model = conf().get("model", "gpt-3.5-turbo")
# model = conf().get("model", "gpt-3.5-turbo")
if use_azure_chatgpt:
from bot.azure_chatgpt import AzureChatGPTBot

self.bot = AzureChatGPTBot()
elif model in litellm.model_list:
# see litellm supported models here:
# https://litellm.readthedocs.io/en/latest/supported/
from bot.litellm import liteLLMChatGPTBot
self.bot = liteLLMChatGPTBot()
else:
from bot.chatgpt import ChatGPTBot
self.bot = ChatGPTBot()
# see litellm supported models here:
# https://litellm.readthedocs.io/en/latest/supported/
from bot.litellm import LiteLLMChatGPTBot

self.bot = LiteLLMChatGPTBot()

# elif model in litellm.open_ai_chat_completion_models:
# from bot.chatgpt import ChatGPTBot

# self.bot = ChatGPTBot()
# else:
# # see litellm supported models here:
# # https://litellm.readthedocs.io/en/latest/supported/
# from bot.litellm import LiteLLMChatGPTBot

# self.bot = LiteLLMChatGPTBot()

def reply(self, context: Context) -> Reply:
return self.bot.reply(context)
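
Note on the change above: the elif model in litellm.model_list branch and the plain ChatGPTBot fallback are dropped (kept only as comments), so every non-Azure configuration is now constructed as LiteLLMChatGPTBot, and the provider-specific key handling moves into bot/litellm.py below. A quick sketch of the litellm model lists that both the removed branch and the new code consult; the model names shown are illustrative and depend on the installed litellm version:

    import litellm

    # litellm exposes plain Python lists of the model names it can route
    print("gpt-3.5-turbo" in litellm.model_list)            # what the removed elif branch checked
    print("command-nightly" in litellm.cohere_models)       # checked by LiteLLMChatGPTBot below
    print("claude-instant-1" in litellm.anthropic_models)   # checked by LiteLLMChatGPTBot below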
20 changes: 13 additions & 7 deletions bot/litellm.py
@@ -4,23 +4,29 @@
from litellm import completion
from utils.log import logger
from config import conf
import os

class liteLLMChatGPTBot(ChatGPTBot):
def __init__(self):
openai.api_key = conf().get("openai_api_key")
os.environ['OPENAI_API_KEY'] = openai.api_key # litellm reads env variables for keys

# extra litellm configs:
class LiteLLMChatGPTBot(ChatGPTBot):
def __init__(self):
api_key = conf().get("openai_api_key")
model = conf().get("model", "gpt-3.5-turbo")
api_base = conf().get("openai_api_base")
proxy = conf().get("proxy")

if model in litellm.cohere_models:
litellm.cohere_key = api_key
elif model in litellm.anthropic_models:
litellm.anthropic_key = api_key
else:
litellm.openai_key = api_key

if api_base:
litellm.api_base = api_base
if proxy:
openai.proxy = proxy
self.name = self.__class__.__name__
self.args = {
"model": conf().get("model"),
"model": model,
"temperature": conf().get("temperature"),
}

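For context on the key routing above: once the key is assigned to litellm.cohere_key, litellm.anthropic_key, or litellm.openai_key, litellm.completion accepts the same OpenAI-style arguments for every provider, so the reply logic inherited from ChatGPTBot (collapsed above) can keep building requests unchanged. A minimal sketch under that assumption; the key, model name, and message are placeholders:

    import litellm
    from litellm import completion

    litellm.anthropic_key = "sk-ant-..."  # what LiteLLMChatGPTBot.__init__ sets for an Anthropic model

    response = completion(
        model="claude-instant-1",  # placeholder; any model in litellm's supported lists works the same way
        messages=[{"role": "user", "content": "Hello"}],
        temperature=0.7,
    )
    # litellm normalizes responses to the OpenAI chat-completion schema
    print(response["choices"][0]["message"]["content"])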
