2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "2.0.0-alpha.8"
".": "2.0.0-alpha.9"
}
4 changes: 2 additions & 2 deletions .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 43
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2Ftogetherai-a1f15d8f8f7326616ea246a73d53bda093da7a9a5e3fe50a9ec6a5a0b958ec63.yml
openapi_spec_hash: 7a03e5140a9a6668ff42c47ea0d03a07
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2Ftogetherai-bfa7422593036f383fcc5209e8a52705f582be9480f90747f8962a46ed5b1152.yml
openapi_spec_hash: 400da476d5f86a3493bf6dacfe6826f0
config_hash: 87a5832ab2ecefe567d22108531232f5
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog

## 2.0.0-alpha.9 (2025-11-27)

Full Changelog: [v2.0.0-alpha.8...v2.0.0-alpha.9](https://github.com/togethercomputer/together-py/compare/v2.0.0-alpha.8...v2.0.0-alpha.9)

### Bug Fixes

* ensure streams are always closed ([db990c7](https://github.com/togethercomputer/together-py/commit/db990c744ebfffcfe48f52dc44b1ca7b47f1f79a))


### Chores

* **deps:** mypy 1.18.1 has a regression, pin to 1.17 ([2235b95](https://github.com/togethercomputer/together-py/commit/2235b95d3e8dc11c9edc308e2b4b69b1463d21cb))

## 2.0.0-alpha.8 (2025-11-26)

Full Changelog: [v2.0.0-alpha.7...v2.0.0-alpha.8](https://github.com/togethercomputer/together-py/compare/v2.0.0-alpha.7...v2.0.0-alpha.8)
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "together"
version = "2.0.0-alpha.8"
version = "2.0.0-alpha.9"
description = "The official Python library for the together API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -67,7 +67,7 @@ conflicts = [
# version pins are in uv.lock
dev = [
"pyright==1.1.399",
"mypy",
"mypy==1.17",
"respx",
"pytest",
"pytest-asyncio",
4 changes: 2 additions & 2 deletions requirements-dev.lock
@@ -76,7 +76,7 @@ multidict==6.7.0
# via
# aiohttp
# yarl
mypy==1.18.2
mypy==1.17.0
mypy-extensions==1.1.0
# via mypy
nodeenv==1.9.1
@@ -95,7 +95,7 @@ propcache==0.4.1
# via
# aiohttp
# yarl
pydantic==2.12.4
pydantic==2.12.5
# via together
pydantic-core==2.41.5
# via pydantic
98 changes: 50 additions & 48 deletions src/together/_streaming.py
@@ -55,30 +55,31 @@ def __stream__(self) -> Iterator[_T]:
process_data = self._client._process_response_data
iterator = self._iter_events()

for sse in iterator:
if sse.data.startswith("[DONE]"):
break

if sse.event is None:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

yield process_data(data=data, cast_to=cast_to, response=response)

# As we might not fully consume the response stream, we need to close it explicitly
response.close()
try:
for sse in iterator:
if sse.data.startswith("[DONE]"):
break

if sse.event is None:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

yield process_data(data=data, cast_to=cast_to, response=response)
finally:
# Ensure the response is closed even if the consumer doesn't read all data
response.close()

def __enter__(self) -> Self:
return self
@@ -137,30 +137,31 @@ async def __stream__(self) -> AsyncIterator[_T]:
process_data = self._client._process_response_data
iterator = self._iter_events()

async for sse in iterator:
if sse.data.startswith("[DONE]"):
break

if sse.event is None:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

yield process_data(data=data, cast_to=cast_to, response=response)

# As we might not fully consume the response stream, we need to close it explicitly
await response.aclose()
try:
async for sse in iterator:
if sse.data.startswith("[DONE]"):
break

if sse.event is None:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

yield process_data(data=data, cast_to=cast_to, response=response)
finally:
# Ensure the response is closed even if the consumer doesn't read all data
await response.aclose()

async def __aenter__(self) -> Self:
return self
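The substantive change in this release is the one above: both `__stream__` implementations now wrap the event loop in `try`/`finally` so the HTTP response is closed even when the consumer never exhausts the stream. The sketch below is not the SDK's code — `FakeResponse` and `stream_events` are illustrative stand-ins — but it shows why the pattern works: closing a suspended generator raises `GeneratorExit` at the `yield`, which runs the `finally` block.

```python
from typing import Iterator


class FakeResponse:
    """Stand-in for an httpx response; records whether close() was called."""

    def __init__(self) -> None:
        self.closed = False

    def close(self) -> None:
        self.closed = True


def stream_events(response: FakeResponse) -> Iterator[int]:
    # Mirrors the shape of __stream__: yield items, then clean up in finally.
    try:
        for chunk in range(100):
            yield chunk
    finally:
        # Runs on normal exhaustion, on an error, and on early termination.
        response.close()


resp = FakeResponse()
gen = stream_events(resp)
for value in gen:
    if value == 3:  # consumer stops early without draining the stream
        break
gen.close()  # explicit close; garbage collection would also trigger it
assert resp.closed
```

Closing or garbage-collecting the stream object (e.g. via the context-manager support shown above) plays the role of `gen.close()` here, so the underlying connection is released deterministically.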
2 changes: 1 addition & 1 deletion src/together/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "together"
__version__ = "2.0.0-alpha.8" # x-release-please-version
__version__ = "2.0.0-alpha.9" # x-release-please-version