Skip to content

Commit

Permalink
%%bash: read from stream until separator found or buffer is full (#14019)

Browse files Browse the repository at this point in the history

Original issue: #14005
TL;DR: before, we would read line by line; however, this is not necessary, and
we should just continue reading until we reach EOF.

The case we patch here is if we read a line larger than the allowed
buffer size, we will return that chunk (aka the entire buffer) to be
written out and still continue reading.
  • Loading branch information
Carreau committed Apr 21, 2023
2 parents a3d5a06 + 78ba47c commit 47d043e
Showing 1 changed file with 13 additions and 4 deletions.
17 changes: 13 additions & 4 deletions IPython/core/magics/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# Distributed under the terms of the Modified BSD License.

import asyncio
import asyncio.exceptions
import atexit
import errno
import os
Expand Down Expand Up @@ -208,15 +209,23 @@ def in_thread(coro):
"""Call a coroutine on the asyncio thread"""
return asyncio.run_coroutine_threadsafe(coro, event_loop).result()

async def _readchunk(stream):
try:
return await stream.readuntil(b"\n")
except asyncio.exceptions.IncompleteReadError as e:
return e.partial
except asyncio.exceptions.LimitOverrunError as e:
return await stream.read(e.consumed)

async def _handle_stream(stream, stream_arg, file_object):
    """Drain *stream* chunk by chunk until EOF.

    The diff rendering interleaved the removed ``readline``/``line``
    statements with the added ``_readchunk``/``chunk`` ones, leaving the
    span syntactically invalid; this is the reconstructed post-commit
    body using ``_readchunk`` only.

    Parameters
    ----------
    stream : asyncio stream reader
        Source of the subprocess output.
    stream_arg : str or falsy
        If truthy, the name of a user-namespace variable to store output
        in instead of writing it out.
    file_object : file-like
        Destination for output when *stream_arg* is falsy.
    """
    while True:
        # An empty chunk means _readchunk hit EOF with nothing buffered.
        chunk = (await _readchunk(stream)).decode("utf8", errors="replace")
        if not chunk:
            break
        if stream_arg:
            # Capture into the user namespace rather than echoing.
            # NOTE(review): ``self`` comes from the enclosing method's
            # closure scope — confirm against the full file.
            self.shell.user_ns[stream_arg] = chunk
        else:
            file_object.write(chunk)
            # Flush per chunk so output appears promptly while the
            # subprocess is still running.
            file_object.flush()

async def _stream_communicate(process, cell):
Expand Down

0 comments on commit 47d043e

Please sign in to comment.