Skip to content

Commit

Permalink
chore(internal): remove redundant client test (#1085)
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-bot committed Jan 17, 2024
1 parent 3e60db6 commit 947974f
Showing 1 changed file with 0 additions and 55 deletions.
55 changes: 0 additions & 55 deletions tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
from openai import OpenAI, AsyncOpenAI, APIResponseValidationError
from openai._client import OpenAI, AsyncOpenAI
from openai._models import BaseModel, FinalRequestOptions
from openai._response import APIResponse, AsyncAPIResponse
from openai._constants import RAW_RESPONSE_HEADER
from openai._streaming import Stream, AsyncStream
from openai._exceptions import OpenAIError, APIStatusError, APITimeoutError, APIResponseValidationError
Expand Down Expand Up @@ -665,33 +664,6 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str
calculated = client._calculate_retry_timeout(remaining_retries, options, headers)
assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType]

@mock.patch("openai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_streaming_response(self) -> None:
    """A raw streaming response holds its connection open until the body is drained."""
    request_body = {
        "messages": [
            {
                "role": "user",
                "content": "Say this is a test",
            }
        ],
        "model": "gpt-3.5-turbo",
    }
    stream_response = self.client.post(
        "/chat/completions",
        body=request_body,
        cast_to=APIResponse[bytes],
        options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
    )

    # Before consumption the response is still open and occupies a connection.
    assert not cast(Any, stream_response.is_closed)
    assert _get_open_connections(self.client) == 1

    # Drain the stream completely; exhausting it releases the connection.
    for _ in stream_response.iter_bytes():
        pass

    assert cast(Any, stream_response.is_closed)
    assert _get_open_connections(self.client) == 0

@mock.patch("openai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
Expand Down Expand Up @@ -1372,33 +1344,6 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte
calculated = client._calculate_retry_timeout(remaining_retries, options, headers)
assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType]

@mock.patch("openai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
async def test_streaming_response(self) -> None:
    """A raw async streaming response holds its connection open until the body is drained."""
    request_body = {
        "messages": [
            {
                "role": "user",
                "content": "Say this is a test",
            }
        ],
        "model": "gpt-3.5-turbo",
    }
    stream_response = await self.client.post(
        "/chat/completions",
        body=request_body,
        cast_to=AsyncAPIResponse[bytes],
        options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
    )

    # Before consumption the response is still open and occupies a connection.
    assert not cast(Any, stream_response.is_closed)
    assert _get_open_connections(self.client) == 1

    # Drain the stream completely; exhausting it releases the connection.
    async for _ in stream_response.iter_bytes():
        pass

    assert cast(Any, stream_response.is_closed)
    assert _get_open_connections(self.client) == 0

@mock.patch("openai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
Expand Down

0 comments on commit 947974f

Please sign in to comment.