Sync/pull org and openai api base bugfix into master #4935

autogpt/config/config.py (13 changes: 12 additions & 1 deletion)

@@ -144,7 +144,18 @@ def validate_plugins(cls, p: AutoGPTPluginTemplate | Any):
         ), f"Plugins must subclass AutoGPTPluginTemplate; {p} is a template instance"
         return p

-    def get_azure_kwargs(self, model: str) -> dict[str, str]:
+    def get_openai_credentials(self, model: str) -> dict[str, str]:
+        credentials = {
+            "api_key": self.openai_api_key,
+            "api_base": self.openai_api_base,
+            "organization": self.openai_organization,
+        }
+        if self.use_azure:
+            azure_credentials = self.get_azure_credentials(model)
+            credentials.update(azure_credentials)
+        return credentials
+
+    def get_azure_credentials(self, model: str) -> dict[str, str]:
         """Get the kwargs for the Azure API."""

         # Fix --gpt3only and --gpt4only in combination with Azure
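For illustration, a minimal sketch of what the new accessor yields. The values are hypothetical, and the Azure-specific keys (such as deployment_id) come from get_azure_credentials, which is only renamed by this change:

    # Hypothetical usage, not part of the diff; the Config instance is built however
    # the application normally builds it.
    config = Config()

    credentials = config.get_openai_credentials("gpt-3.5-turbo")
    # Non-Azure: api_key, api_base and organization are all forwarded, so a custom
    # API base URL or an organization-scoped key is no longer dropped at the call site.
    # e.g. {"api_key": "sk-...", "api_base": "...", "organization": "org-..."}

    # With config.use_azure enabled, the same dict is additionally updated with the
    # Azure kwargs for the requested model (for example its deployment_id).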

autogpt/llm/utils/__init__.py (18 changes: 4 additions & 14 deletions)

@@ -78,17 +78,14 @@ def create_text_completion(
     if temperature is None:
         temperature = config.temperature

-    if config.use_azure:
-        kwargs = config.get_azure_kwargs(model)
-    else:
-        kwargs = {"model": model}
+    kwargs = {"model": model}
+    kwargs.update(config.get_openai_credentials(model))

     response = iopenai.create_text_completion(
         prompt=prompt,
         **kwargs,
         temperature=temperature,
         max_tokens=max_output_tokens,
-        api_key=config.openai_api_key,
     )
     logger.debug(f"Response: {response}")

@@ -150,9 +147,7 @@ def create_chat_completion(
     if message is not None:
         return message

-    chat_completion_kwargs["api_key"] = config.openai_api_key
-    if config.use_azure:
-        chat_completion_kwargs.update(config.get_azure_kwargs(model))
+    chat_completion_kwargs.update(config.get_openai_credentials(model))

     if functions:
         chat_completion_kwargs["functions"] = [

@@ -196,12 +191,7 @@ def check_model(
     config: Config,
 ) -> str:
     """Check if model is available for use. If not, return gpt-3.5-turbo."""
-    openai_credentials = {
-        "api_key": config.openai_api_key,
-    }
-    if config.use_azure:
-        openai_credentials.update(config.get_azure_kwargs(model_name))
-
+    openai_credentials = config.get_openai_credentials(model_name)
     api_manager = ApiManager()
     models = api_manager.get_models(**openai_credentials)

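All three call sites (create_text_completion, create_chat_completion, check_model) now share a single code path instead of branching on config.use_azure, and the separate api_key argument disappears because the key travels inside the credentials dict. Forwarding api_base and organization works because the pre-1.0 openai Python package underneath the iopenai helpers accepts per-request credential overrides; a standalone sketch with hypothetical values, not code from this PR:

    import openai

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "ping"}],
        api_key="sk-...",                             # hypothetical key
        api_base="https://example-proxy.invalid/v1",  # custom endpoint is now honoured
        organization="org-...",                       # organization is now honoured
    )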

autogpt/memory/vector/utils.py (7 changes: 2 additions & 5 deletions)

@@ -41,10 +41,8 @@ def get_embedding(
         input = [text.replace("\n", " ") for text in input]

     model = config.embedding_model
-    if config.use_azure:
-        kwargs = config.get_azure_kwargs(model)
-    else:
-        kwargs = {"model": model}
+    kwargs = {"model": model}
+    kwargs.update(config.get_openai_credentials(model))

     logger.debug(
         f"Getting embedding{f's for {len(input)} inputs' if multiple else ''}"

@@ -57,7 +55,6 @@
     embeddings = iopenai.create_embedding(
         input,
         **kwargs,
-        api_key=config.openai_api_key,
     ).data

     if not multiple:
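Callers of get_embedding are unaffected by this refactor; the credential handling stays inside the helper. A rough usage sketch, assuming the signature implied by the surrounding file (single string in, single embedding out; list in, list out):

    from autogpt.memory.vector.utils import get_embedding

    vector = get_embedding("hello world", config)    # one input -> one embedding
    vectors = get_embedding(["foo", "bar"], config)  # list input -> list of embeddings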

autogpt/processing/text.py (1 change: 0 additions & 1 deletion)

@@ -137,7 +137,6 @@ def summarize_text(
         logger.info(f"Summarized {len(chunks)} chunks")

         summary, _ = summarize_text("\n\n".join(summaries), config)
-
         return summary.strip(), [
             (summaries[i], chunks[i][0]) for i in range(0, len(chunks))
         ]

tests/unit/test_config.py (26 changes: 20 additions & 6 deletions)

@@ -174,18 +174,32 @@ def test_azure_config(config: Config, workspace: Workspace) -> None:

     fast_llm = config.fast_llm
     smart_llm = config.smart_llm
-    assert config.get_azure_kwargs(config.fast_llm)["deployment_id"] == "FAST-LLM_ID"
-    assert config.get_azure_kwargs(config.smart_llm)["deployment_id"] == "SMART-LLM_ID"
+    assert (
+        config.get_azure_credentials(config.fast_llm)["deployment_id"] == "FAST-LLM_ID"
+    )
+    assert (
+        config.get_azure_credentials(config.smart_llm)["deployment_id"]
+        == "SMART-LLM_ID"
+    )

     # Emulate --gpt4only
     config.fast_llm = smart_llm
-    assert config.get_azure_kwargs(config.fast_llm)["deployment_id"] == "SMART-LLM_ID"
-    assert config.get_azure_kwargs(config.smart_llm)["deployment_id"] == "SMART-LLM_ID"
+    assert (
+        config.get_azure_credentials(config.fast_llm)["deployment_id"] == "SMART-LLM_ID"
+    )
+    assert (
+        config.get_azure_credentials(config.smart_llm)["deployment_id"]
+        == "SMART-LLM_ID"
+    )

     # Emulate --gpt3only
     config.fast_llm = config.smart_llm = fast_llm
-    assert config.get_azure_kwargs(config.fast_llm)["deployment_id"] == "FAST-LLM_ID"
-    assert config.get_azure_kwargs(config.smart_llm)["deployment_id"] == "FAST-LLM_ID"
+    assert (
+        config.get_azure_credentials(config.fast_llm)["deployment_id"] == "FAST-LLM_ID"
+    )
+    assert (
+        config.get_azure_credentials(config.smart_llm)["deployment_id"] == "FAST-LLM_ID"
+    )

     del os.environ["USE_AZURE"]
     del os.environ["AZURE_CONFIG_FILE"]