# Source: AzureOpenAIModel.py (Langflow) — 110 lines (100 loc), 3.68 KB
from langchain_openai import AzureChatOpenAI
from pydantic.v1 import SecretStr
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, Output, SecretStrInput, StrInput
class AzureChatOpenAIComponent(LCModelComponent):
    """Langflow component for Azure-hosted OpenAI chat models.

    Collects the Azure connection settings (endpoint, deployment name,
    API version, API key) together with common generation options
    (temperature, max tokens, streaming) as Langflow inputs, and exposes
    two outputs: generated text and the configured language model.
    """

    display_name: str = "Azure OpenAI"
    description: str = "Generate text using Azure OpenAI LLMs."
    documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
    beta = False
    icon = "Azure"

    # Model names offered in the "Model Name" dropdown; the first entry
    # is the default selection.
    AZURE_OPENAI_MODELS = [
        "gpt-35-turbo",
        "gpt-35-turbo-16k",
        "gpt-35-turbo-instruct",
        "gpt-4",
        "gpt-4-32k",
        "gpt-4o",
        "gpt-4-turbo",
    ]

    # API versions offered in the "API Version" dropdown; the last entry
    # is the default selection.
    AZURE_OPENAI_API_VERSIONS = [
        "2023-03-15-preview",
        "2023-05-15",
        "2023-06-01-preview",
        "2023-07-01-preview",
        "2023-08-01-preview",
        "2023-09-01-preview",
        "2023-12-01-preview",
        "2024-04-09",
        "2024-05-13",
    ]

    inputs = [
        DropdownInput(
            name="model",
            display_name="Model Name",
            options=AZURE_OPENAI_MODELS,
            value=AZURE_OPENAI_MODELS[0],
        ),
        StrInput(
            name="azure_endpoint",
            display_name="Azure Endpoint",
            info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`",
        ),
        StrInput(name="azure_deployment", display_name="Deployment Name"),
        DropdownInput(
            name="api_version",
            display_name="API Version",
            options=AZURE_OPENAI_API_VERSIONS,
            value=AZURE_OPENAI_API_VERSIONS[-1],
            advanced=True,
        ),
        SecretStrInput(name="api_key", display_name="API Key", password=True),
        FloatInput(name="temperature", display_name="Temperature", value=0.7),
        IntInput(
            name="max_tokens",
            display_name="Max Tokens",
            advanced=True,
            info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
        ),
        MessageInput(name="input_value", display_name="Input"),
        BoolInput(name="stream", display_name="Stream", info=STREAM_INFO_TEXT, advanced=True),
        StrInput(
            name="system_message",
            display_name="System Message",
            advanced=True,
            info="System message to pass to the model.",
        ),
    ]

    outputs = [
        Output(display_name="Text", name="text_output", method="text_response"),
        Output(display_name="Language Model", name="model_output", method="model_response"),
    ]

    def model_response(self) -> LanguageModel:
        """Build and return the configured ``AzureChatOpenAI`` client.

        Returns:
            LanguageModel: the instantiated Azure chat model.

        Raises:
            ValueError: if constructing the client fails for any reason;
                the original exception is attached as the cause.
        """
        # Only wrap the key when one was actually provided; otherwise
        # pass None through to the client unchanged.
        secret_key = SecretStr(self.api_key) if self.api_key else None
        try:
            llm = AzureChatOpenAI(
                model=self.model,
                azure_endpoint=self.azure_endpoint,
                azure_deployment=self.azure_deployment,
                api_version=self.api_version,
                api_key=secret_key,
                temperature=self.temperature,
                # The UI uses 0 to mean "unlimited", which the client
                # expects as None rather than 0.
                max_tokens=self.max_tokens or None,
                streaming=self.stream,
            )
        except Exception as e:
            raise ValueError("Could not connect to AzureOpenAI API.") from e
        return llm