Skip to content

Commit

Permalink
feat(openai): support explicit proxy (#321)
Browse files Browse the repository at this point in the history
* feat(openai): support explicit proxy

* fix(docs): correct AzureOpenAI docs and mention OpenAI proxy

* test: add tests for OpenAI proxy
  • Loading branch information
mspronesti committed Jun 28, 2023
1 parent 98382ba commit 809d9be
Show file tree
Hide file tree
Showing 7 changed files with 62 additions and 9 deletions.
2 changes: 1 addition & 1 deletion docs/building_docs.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ You can build and view this documentation project locally - we recommend that yo
and dependency management tool.
```console
# Install required Python dependencies (MkDocs etc.)
poetry insall --with docs
poetry install --with docs
# Run the mkdocs development server
mkdocs serve
```
Expand Down
26 changes: 20 additions & 6 deletions docs/llms.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ llm = OpenAI(openai_api_key="my-openai-api-key")
pandas_ai = PandasAI(llm=llm)
```

As an alternative, you can set the `OPENAI_API_KEY` environment variable and instantiate the OpenAI object without passing the API key:
As an alternative, you can set the `OPENAI_API_KEY` environment variable and instantiate the `OpenAI` object without passing the API key:

```python
from pandasai import PandasAI
Expand All @@ -26,6 +26,9 @@ llm = OpenAI() # no need to pass the API key, it will be read from the environme
pandas_ai = PandasAI(llm=llm)
```

If you are behind an explicit proxy, you can either specify `openai_proxy` when instantiating the `OpenAI` object, or set the `OPENAI_PROXY` environment variable; requests will then be routed through the proxy.


## HuggingFace models

In order to use HuggingFace models, you need to have a HuggingFace API key. You can get one [here](https://huggingface.co/join).
Expand Down Expand Up @@ -111,24 +114,35 @@ pandas_ai = PandasAI(llm=llm)

## Azure OpenAI

In order to use Azure OpenAI models, you need to have an Azure API key. You can get one [here](https://azure.microsoft.com/it-it/products/cognitive-services/openai-service).
In order to use Azure OpenAI models, you need to have an Azure OpenAI API key as well as an Azure OpenAI endpoint. You can get one [here](https://azure.microsoft.com/it-it/products/cognitive-services/openai-service).

Once you have an API key, you can use it to instantiate an Azure OpenAI object:
To instantiate an Azure OpenAI object, you also need to specify the name of your deployed model on Azure and the API version:

```python
from pandasai import PandasAI
from pandasai.llm.azure_openai import AzureOpenAI

llm = AzureOpenAI(azure_openai_api_key="my-azure-openai-api-key")
llm = AzureOpenAI(
api_key="my-azure-openai-api-key",
api_base="my-azure-openai-api-endpoint",
api_version="2023-05-15",
deployment_name="my-deployment-name"
)
pandas_ai = PandasAI(llm=llm)
```

As an alternative, you can set the `AZURE_OPENAI_KEY` environment variable and instantiate the Azure OpenAI object without passing the API key:
As an alternative, you can set the `OPENAI_API_KEY`, `OPENAI_API_VERSION` and `OPENAI_API_BASE` environment variables and instantiate the Azure OpenAI object without passing them:

```python
from pandasai import PandasAI
from pandasai.llm.azure_openai import AzureOpenAI

llm = AzureOpenAI() # no need to pass the API key, it will be read from the environment variable
llm = AzureOpenAI(
deployment_name="my-deployment-name"
) # no need to pass the API key, endpoint, and API version; they are read from the environment variables
pandas_ai = PandasAI(llm=llm)
```

If you are behind an explicit proxy, you can either specify `openai_proxy` when instantiating the `AzureOpenAI` object, or set the `OPENAI_PROXY` environment variable; requests will then be routed through the proxy.


7 changes: 7 additions & 0 deletions pandasai/llm/azure_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,13 @@ def __init__(
self.is_chat_model = is_chat_model
self.engine = deployment_name

self.openai_proxy = kwargs.get("openai_proxy") or os.getenv("OPENAI_PROXY")
if self.openai_proxy:
openai.proxy = {
"http": self.openai_proxy,
"https": self.openai_proxy
}

self._set_params(**kwargs)

@property
Expand Down
2 changes: 2 additions & 0 deletions pandasai/llm/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,8 @@ class BaseOpenAI(LLM, ABC):
frequency_penalty: float = 0
presence_penalty: float = 0.6
stop: Optional[str] = None
# support explicit proxy for OpenAI
openai_proxy: Optional[str] = None

def _set_params(self, **kwargs):
"""
Expand Down
7 changes: 7 additions & 0 deletions pandasai/llm/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,13 @@ def __init__(
raise APIKeyNotFoundError("OpenAI API key is required")
openai.api_key = self.api_token

self.openai_proxy = kwargs.get("openai_proxy") or os.getenv("OPENAI_PROXY")
if self.openai_proxy:
openai.proxy = {
"http": self.openai_proxy,
"https": self.openai_proxy
}

self._set_params(**kwargs)

@property
Expand Down
15 changes: 14 additions & 1 deletion tests/llms/test_azure_openai.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""Unit tests for the openai LLM class"""

import openai
import pytest

from pandasai.exceptions import APIKeyNotFoundError, UnsupportedOpenAIModelError
Expand Down Expand Up @@ -33,6 +33,19 @@ def test_type_with_token(self):
deployment_name="test"
).type == "azure-openai"

def test_proxy(self):
    """Constructing AzureOpenAI with openai_proxy sets both the instance
    attribute and the module-level ``openai.proxy`` for http and https.

    ``openai.proxy`` is global state shared by every test in the session,
    so the previous value is saved and restored to avoid leaking the proxy
    setting into unrelated tests.
    """
    proxy = "http://proxy.mycompany.com:8080"
    # Save the module-global proxy so this test leaves no trace behind.
    previous_proxy = getattr(openai, "proxy", None)
    try:
        client = AzureOpenAI(
            api_token="test",
            api_base="test",
            api_version="test",
            deployment_name="test",
            openai_proxy=proxy
        )
        assert client.openai_proxy == proxy
        assert openai.proxy["http"] == proxy
        assert openai.proxy["https"] == proxy
    finally:
        # Restore global state regardless of assertion outcome.
        openai.proxy = previous_proxy

def test_params_setting(self):
llm = AzureOpenAI(
api_token="test",
Expand Down
12 changes: 11 additions & 1 deletion tests/llms/test_openai.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""Unit tests for the openai LLM class"""

import openai
import pytest

from pandasai.exceptions import APIKeyNotFoundError, UnsupportedOpenAIModelError
Expand All @@ -24,6 +24,16 @@ def test_type_without_token(self):
def test_type_with_token(self):
assert OpenAI(api_token="test").type == "openai"

def test_proxy(self):
    """Constructing OpenAI with openai_proxy sets both the instance
    attribute and the module-level ``openai.proxy`` for http and https.

    ``openai.proxy`` is global state shared by every test in the session,
    so the previous value is saved and restored to avoid leaking the proxy
    setting into unrelated tests.
    """
    proxy = "http://proxy.mycompany.com:8080"
    # Save the module-global proxy so this test leaves no trace behind.
    previous_proxy = getattr(openai, "proxy", None)
    try:
        client = OpenAI(
            api_token="test",
            openai_proxy=proxy
        )
        assert client.openai_proxy == proxy
        assert openai.proxy["http"] == proxy
        assert openai.proxy["https"] == proxy
    finally:
        # Restore global state regardless of assertion outcome.
        openai.proxy = previous_proxy

def test_params_setting(self):
llm = OpenAI(
api_token="test",
Expand Down

0 comments on commit 809d9be

Please sign in to comment.