Skip to content

Commit

Permalink
Update patch version v1.4.1
Browse files Browse the repository at this point in the history
  • Loading branch information
kooyunmo committed Jun 19, 2024
1 parent 9bc5ff6 commit 64ee249
Show file tree
Hide file tree
Showing 5 changed files with 90 additions and 12 deletions.
56 changes: 45 additions & 11 deletions friendli/sdk/api/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,21 @@ def __init__(
self._use_grpc = use_grpc
self._client = client or httpx.Client()
self._grpc_channel = grpc_channel
self._grpc_stub = None

def __enter__(self) -> ServingAPI:
    """Return this API instance for use as a ``with``-statement target."""
    return self

def __exit__(self, exc_type, exc_val, exc_tb) -> None:
    """Release the gRPC channel and HTTP client when the ``with`` block ends."""
    self.close()

def close(self) -> None:
    """Close the gRPC channel (if one is open) and the HTTP client.

    The HTTP client is closed inside a ``finally`` so it is released
    even when closing the gRPC channel raises; the original code would
    leak ``self._client`` in that case.
    """
    try:
        if self._grpc_channel:
            self._grpc_channel.close()
    finally:
        self._client.close()

def _get_grpc_stub(self, channel: grpc.Channel) -> Any:
    """Build the gRPC stub over *channel*; overridden by APIs that support gRPC."""
    raise NotImplementedError  # pragma: no cover
Expand All @@ -245,14 +260,16 @@ def _request(

if self._use_grpc:
grpc_request = self._build_grpc_request(data=data, model=model)
channel = self._grpc_channel or grpc.insecure_channel(
self._build_grpc_url()
)
if not self._grpc_channel:
self._grpc_channel = grpc.insecure_channel(self._build_grpc_url())
try:
stub = self._get_grpc_stub(channel)
if not self._grpc_stub:
self._grpc_stub = self._get_grpc_stub(self._grpc_channel)
except NotImplementedError as exc:
raise ValueError("This API does not support gRPC.") from exc
return stub.Generate(grpc_request)
assert self._grpc_stub
grpc_response = self._grpc_stub.Generate(grpc_request)
return grpc_response

http_request = self._build_http_request(data=data, model=model)
http_response = self._client.send(request=http_request, stream=stream)
Expand Down Expand Up @@ -294,6 +311,21 @@ def __init__(
self._use_grpc = use_grpc
self._client = client or httpx.AsyncClient()
self._grpc_channel = grpc_channel
self._grpc_stub = None

async def __aenter__(self) -> AsyncServingAPI:
    """Return this API instance for use as an ``async with`` target."""
    return self

async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
    """Release the gRPC channel and HTTP client when the ``async with`` block ends."""
    await self.close()

async def close(self) -> None:
    """Close the async gRPC channel (if one is open) and the async HTTP client.

    The HTTP client is closed inside a ``finally`` so it is released
    even when closing the gRPC channel raises; the original code would
    leak ``self._client`` in that case. ``grace=None`` cancels in-flight
    gRPC calls immediately, matching the original behavior.
    """
    try:
        if self._grpc_channel:
            await self._grpc_channel.close(grace=None)
    finally:
        await self._client.aclose()

def _get_grpc_stub(self, channel: grpc.aio.Channel) -> Any:
    """Build the async gRPC stub over *channel*; overridden by APIs that support gRPC."""
    raise NotImplementedError  # pragma: no cover
Expand All @@ -313,15 +345,17 @@ async def _request(

if self._use_grpc:
grpc_request = self._build_grpc_request(data=data, model=model)
channel = self._grpc_channel or grpc.aio.insecure_channel(
self._build_grpc_url()
)
if not self._grpc_channel:
self._grpc_channel = grpc.aio.insecure_channel(self._build_grpc_url())
try:
stub = self._get_grpc_stub(channel)
if not self._grpc_stub:
self._grpc_stub = self._get_grpc_stub(self._grpc_channel)
except NotImplementedError as exc:
raise ValueError("This API does not support gRPC.") from exc
grpc_response = stub.Generate(grpc_request, timeout=DEFAULT_REQ_TIMEOUT)
await grpc_response.wait_for_connection()
assert self._grpc_stub
grpc_response = self._grpc_stub.Generate(
grpc_request, timeout=DEFAULT_REQ_TIMEOUT
)
return grpc_response

http_request = self._build_http_request(data=data, model=model)
Expand Down
8 changes: 8 additions & 0 deletions friendli/sdk/api/chat/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,10 @@ def __init__(
grpc_channel=grpc_channel,
)

def close(self) -> None:
    """Release the resources held by the completions client."""
    self.completions.close()


class AsyncChat:
"""Asynchronous chat API."""
Expand All @@ -61,3 +65,7 @@ def __init__(
client=client,
grpc_channel=grpc_channel,
)

async def close(self) -> None:
    """Release the resources held by the async completions client."""
    await self.completions.close()
8 changes: 8 additions & 0 deletions friendli/sdk/api/images/images.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ def __init__(
base_url=base_url, endpoint_id=endpoint_id, client=client
)

def close(self) -> None:
    """Release the resources held by the text-to-image client."""
    self.text_to_image.close()


class AsyncImages:
"""Asynchronous images API."""
Expand All @@ -43,3 +47,7 @@ def __init__(
self.text_to_image = AsyncTextToImage(
base_url=base_url, endpoint_id=endpoint_id, client=client
)

async def close(self) -> None:
    """Release the resources held by the async text-to-image client."""
    await self.text_to_image.close()
28 changes: 28 additions & 0 deletions friendli/sdk/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,20 @@ def __init__(
self.endpoint = EndpointApi(client=endpoint_client)
self.model = ModelApi(client=model_client)

def __enter__(self) -> Friendli:
    """Return this client for use as a ``with``-statement target."""
    return self

def __exit__(self, exc_type, exc_val, exc_tb) -> None:
    """Close every API client's resources when the ``with`` block ends."""
    self.close()

def close(self) -> None:
    """Release the resources held by each serving API client."""
    # NOTE(review): `self.endpoint` and `self.model` (see __init__) also wrap
    # HTTP clients but are not closed here — confirm whether they need closing.
    self.completions.close()
    self.chat.close()
    self.images.close()


class AsyncFriendli(FriendliClientBase):
"""Async Friendli API client."""
Expand Down Expand Up @@ -164,3 +178,17 @@ def __init__(
self.images = AsyncImages(
base_url=self._base_url, endpoint_id=self._endpoint_id
)

async def __aenter__(self) -> AsyncFriendli:
    """Return this client for use as an ``async with`` target."""
    return self

async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
    """Close every API client's resources when the ``async with`` block ends."""
    await self.close()

async def close(self) -> None:
    """Release the resources held by each async serving API client."""
    await self.completions.close()
    await self.chat.close()
    await self.images.close()
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "friendli-client"
version = "1.4.0"
version = "1.4.1"
description = "Client of Friendli Suite."
license = "Apache-2.0"
authors = ["FriendliAI teams <eng@friendli.ai>"]
Expand Down

0 comments on commit 64ee249

Please sign in to comment.