You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
While calling the Humanloop streaming endpoint, I encountered this error. The exception is raised inside aiohttp, but I am not sure what the recommended way is to stream from Humanloop with a prompt deployed on Humanloop.
Traceback (most recent call last):
File "menv/lib/python3.10/site-packages/starlette/responses.py", line 264, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "menv/lib/python3.10/site-packages/starlette/responses.py", line 260, in wrap
await func()
File "menv/lib/python3.10/site-packages/starlette/responses.py", line 237, in listen_for_disconnect
message = await receive()
File "menv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 535, in receive
await self.message_event.wait()
File "/usr/lib/python3.10/asyncio/locks.py", line 214, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 7ed1bb777040
During handling of the above exception, another exception occurred:
+ Exception Group Traceback (most recent call last):
| File "menv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 407, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| File "menv/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 69, in __call__
| return await self.app(scope, receive, send)
| File "menv/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
| await self.middleware_stack(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
| raise exc
| File "menv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
| await self.app(scope, receive, _send)
| File "menv/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "menv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "menv/lib/python3.10/site-packages/starlette/routing.py", line 758, in __call__
| await self.middleware_stack(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/routing.py", line 778, in app
| await route.handle(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/routing.py", line 299, in handle
| await self.app(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/routing.py", line 79, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "menv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "menv/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
| await response(scope, receive, send)
| File "menv/lib/python3.10/site-packages/starlette/responses.py", line 257, in __call__
| async with anyio.create_task_group() as task_group:
| File "menv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 678, in __aexit__
| raise BaseExceptionGroup(
| exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "src/prompts/conversation.py", line 388, in qa_conversation_hl
| async for chunk in generator:
| File "src/prompts/conversation.py", line 57, in humanloop_stream
| async for token in response.content:
| File "menv/lib/python3.10/site-packages/humanloop/client_custom.py", line 43, in _parsed_generator
| async for line in generator:
| File "menv/lib/python3.10/site-packages/humanloop/paths/chat_deployed/post.py", line 278, in stream_iterator
| async for line in response.http_response.content:
| File "menv/lib/python3.10/site-packages/aiohttp/streams.py", line 44, in __anext__
| rv = await self.read_func()
| File "menv/lib/python3.10/site-packages/aiohttp/streams.py", line 307, in readline
| return await self.readuntil()
| File "menv/lib/python3.10/site-packages/aiohttp/streams.py", line 339, in readuntil
| raise ValueError("Chunk too big")
| ValueError: Chunk too big
|
+------------------------------------
```
The text was updated successfully, but these errors were encountered:
While calling the Humanloop streaming endpoint, I encountered this error. The exception is raised inside aiohttp, but I am not sure what the recommended way is to stream from Humanloop with a prompt deployed on Humanloop. Error log:
The text was updated successfully, but these errors were encountered: