-
Notifications
You must be signed in to change notification settings - Fork 304
/
LLMHelper.py
112 lines (103 loc) · 4.39 KB
/
LLMHelper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
from openai import AzureOpenAI
from typing import List
from langchain_openai import AzureChatOpenAI
from langchain_openai import AzureOpenAIEmbeddings
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from .EnvHelper import EnvHelper
class LLMHelper:
    """Factory for Azure OpenAI clients (chat, streaming chat, embeddings).

    All configuration comes from EnvHelper. Two auth modes are supported:
    - "rbac": Azure AD / managed-identity auth via a token provider.
    - anything else: static API-key auth.
    """

    def __init__(self):
        self.env_helper: EnvHelper = EnvHelper()
        self.auth_type = self.env_helper.AZURE_AUTH_TYPE
        self.token_provider = self.env_helper.AZURE_TOKEN_PROVIDER

        if self.auth_type == "rbac":
            self.openai_client = AzureOpenAI(
                azure_endpoint=self.env_helper.AZURE_OPENAI_ENDPOINT,
                api_version=self.env_helper.AZURE_OPENAI_API_VERSION,
                azure_ad_token_provider=self.token_provider,
            )
        else:
            self.openai_client = AzureOpenAI(
                azure_endpoint=self.env_helper.AZURE_OPENAI_ENDPOINT,
                api_version=self.env_helper.AZURE_OPENAI_API_VERSION,
                api_key=self.env_helper.OPENAI_API_KEY,
            )

        self.llm_model = self.env_helper.AZURE_OPENAI_MODEL
        # Empty string means "no explicit limit" — pass None downstream so the
        # service default applies instead of a bogus "" token count.
        self.llm_max_tokens = (
            self.env_helper.AZURE_OPENAI_MAX_TOKENS
            if self.env_helper.AZURE_OPENAI_MAX_TOKENS != ""
            else None
        )
        self.embedding_model = self.env_helper.AZURE_OPENAI_EMBEDDING_MODEL

    def _auth_kwargs(self) -> dict:
        """Return the credential kwargs matching the configured auth type.

        Centralizes the rbac/api-key branching that was previously duplicated
        in every client-factory method, and guarantees exactly one credential
        is passed (the old streaming rbac path mistakenly sent both).
        """
        if self.auth_type == "rbac":
            return {"azure_ad_token_provider": self.token_provider}
        return {"api_key": self.env_helper.OPENAI_API_KEY}

    def get_llm(self):
        """Return a non-streaming LangChain AzureChatOpenAI client."""
        return AzureChatOpenAI(
            deployment_name=self.llm_model,
            temperature=0,
            max_tokens=self.llm_max_tokens,
            # Use the public env value instead of the private
            # openai_client._api_version attribute (same value — __init__
            # passed it in — without depending on SDK internals).
            openai_api_version=self.env_helper.AZURE_OPENAI_API_VERSION,
            azure_endpoint=self.env_helper.AZURE_OPENAI_ENDPOINT,
            **self._auth_kwargs(),
        )

    # TODO: This needs to have a custom callback to stream back to the UI
    def get_streaming_llm(self):
        """Return a streaming AzureChatOpenAI client (stdout callback)."""
        return AzureChatOpenAI(
            azure_endpoint=self.env_helper.AZURE_OPENAI_ENDPOINT,
            streaming=True,
            # Bug fix: callbacks must be handler *instances*; the original
            # passed the StreamingStdOutCallbackHandler class object.
            callbacks=[StreamingStdOutCallbackHandler()],
            deployment_name=self.llm_model,
            temperature=0,
            max_tokens=self.llm_max_tokens,
            openai_api_version=self.env_helper.AZURE_OPENAI_API_VERSION,
            **self._auth_kwargs(),
        )

    def get_embedding_model(self):
        """Return an AzureOpenAIEmbeddings client for the configured deployment."""
        return AzureOpenAIEmbeddings(
            azure_endpoint=self.env_helper.AZURE_OPENAI_ENDPOINT,
            azure_deployment=self.embedding_model,
            chunk_size=1,
            **self._auth_kwargs(),
        )

    def get_chat_completion_with_functions(
        self, messages: List[dict], functions: List[dict], function_call: str = "auto"
    ):
        """Run a chat completion with OpenAI function calling enabled.

        :param messages: chat messages in OpenAI dict format.
        :param functions: function schemas offered to the model.
        :param function_call: "auto", "none", or a specific function selector.
        """
        return self.openai_client.chat.completions.create(
            model=self.llm_model,
            messages=messages,
            functions=functions,
            function_call=function_call,
        )

    def get_chat_completion(self, messages: List[dict]):
        """Run a plain chat completion against the configured model."""
        return self.openai_client.chat.completions.create(
            model=self.llm_model,
            messages=messages,
        )