You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Traceback (most recent call last):
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 710, in _error_catcher
yield
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1077, in read_chunked
self._update_chunk_length()
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1012, in _update_chunk_length
raise InvalidChunkLength(self, line) from None
urllib3.exceptions.InvalidChunkLength: InvalidChunkLength(got length b'', 0 bytes read)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 816, in generate
yield from self.raw.stream(chunk_size, decode_content=True)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 937, in stream
yield from self.read_chunked(amt, decode_content=decode_content)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1106, in read_chunked
self._original_response.close()
File "/opt/conda/envs/g/lib/python3.9/contextlib.py", line 137, in __exit__
self.gen.throw(typ, value, traceback)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 727, in _error_catcher
raise ProtocolError(f"Connection broken: {e!r}", e) from e
urllib3.exceptions.ProtocolError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/share/ad/wangsiyuan09/test.py", line 5, in <module>
for chunk in openai.ChatCompletion.create(
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 166, in <genexpr>
return (
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_requestor.py", line 692, in <genexpr>
return (
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_requestor.py", line 115, in parse_stream
for line in rbody:
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 865, in iter_lines
for chunk in self.iter_content(
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 818, in generate
raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))
Expected Behavior
No response
Steps To Reproduce
import openai
if __name__ == "__main__":
openai.api_base = "http://localhost:8000/v1"
openai.api_key = "none"
for chunk in openai.ChatCompletion.create(
model="chatglm2-6b",
messages=[
{"role": "user", "content": "你好"}
],
stream=True
):
if hasattr(chunk.choices[0].delta, "content"):
print(chunk.choices[0].delta.content, end="", flush=True)
Environment
- OS:
- Python:
- Transformers:
- PyTorch:
- CUDA Support (`python -c "import torch; print(torch.cuda.is_available())"`) :
Anything else?
No response
The text was updated successfully, but these errors were encountered:
Is there an existing issue for this?
Current Behavior
Traceback (most recent call last):
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 710, in _error_catcher
yield
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1077, in read_chunked
self._update_chunk_length()
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1012, in _update_chunk_length
raise InvalidChunkLength(self, line) from None
urllib3.exceptions.InvalidChunkLength: InvalidChunkLength(got length b'', 0 bytes read)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 816, in generate
yield from self.raw.stream(chunk_size, decode_content=True)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 937, in stream
yield from self.read_chunked(amt, decode_content=decode_content)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 1106, in read_chunked
self._original_response.close()
File "/opt/conda/envs/g/lib/python3.9/contextlib.py", line 137, in __exit__
self.gen.throw(typ, value, traceback)
File "/opt/conda/envs/g/lib/python3.9/site-packages/urllib3/response.py", line 727, in _error_catcher
raise ProtocolError(f"Connection broken: {e!r}", e) from e
urllib3.exceptions.ProtocolError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/share/ad/wangsiyuan09/test.py", line 5, in <module>
for chunk in openai.ChatCompletion.create(
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 166, in <genexpr>
return (
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_requestor.py", line 692, in <genexpr>
return (
File "/opt/conda/envs/g/lib/python3.9/site-packages/openai/api_requestor.py", line 115, in parse_stream
for line in rbody:
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 865, in iter_lines
for chunk in self.iter_content(
File "/opt/conda/envs/g/lib/python3.9/site-packages/requests/models.py", line 818, in generate
raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))
Expected Behavior
No response
Steps To Reproduce
import openai
if __name__ == "__main__":
openai.api_base = "http://localhost:8000/v1"
openai.api_key = "none"
for chunk in openai.ChatCompletion.create(
model="chatglm2-6b",
messages=[
{"role": "user", "content": "你好"}
],
stream=True
):
if hasattr(chunk.choices[0].delta, "content"):
print(chunk.choices[0].delta.content, end="", flush=True)
Environment
Anything else?
No response
The text was updated successfully, but these errors were encountered: