From b26d27281f79f46453b43e1d993eddd21ed1e76f Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Fri, 2 Feb 2018 01:41:35 +0100
Subject: [PATCH] stream: always reset awaitDrain when emitting data
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The complicated `awaitDrain` machinery can be made a bit slimmer, and
more correct, by just resetting the value each time
`stream.emit('data')` is called.

By resetting the value before emitting the data chunk, and seeing
whether any pipe destinations return `.write() === false`, we always
end up in a consistent state and don’t need to worry about odd
situations (like `dest.write(chunk)` emitting more data).

PR-URL: https://github.com/nodejs/node/pull/18516
Fixes: https://github.com/nodejs/node/issues/18484
Fixes: https://github.com/nodejs/node/issues/18512
Refs: https://github.com/nodejs/node/pull/18515
Reviewed-By: Anatoli Papirovski
Reviewed-By: Matteo Collina
Reviewed-By: James M Snell
Reviewed-By: Minwoo Jung
Reviewed-By: Ruben Bridgewater
---
 lib/_stream_readable.js                       | 13 +++----
 .../test-stream-pipe-manual-resume.js         | 35 +++++++++++++++++++
 2 files changed, 39 insertions(+), 9 deletions(-)
 create mode 100644 test/parallel/test-stream-pipe-manual-resume.js

diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 9713224bd7fb83..9efde9b5f3f2a6 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -266,6 +266,7 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
 
 function addChunk(stream, state, chunk, addToFront) {
   if (state.flowing && state.length === 0 && !state.sync) {
+    state.awaitDrain = 0;
     stream.emit('data', chunk);
     stream.read(0);
   } else {
@@ -465,6 +466,7 @@ Readable.prototype.read = function(n) {
     n = 0;
   } else {
     state.length -= n;
+    state.awaitDrain = 0;
   }
 
   if (state.length === 0) {
@@ -634,17 +636,12 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
       ondrain();
   }
 
-  // If the user pushes more data while we're writing to dest then we'll end up
-  // in ondata again. However, we only want to increase awaitDrain once because
-  // dest will only emit one 'drain' event for the multiple writes.
-  // => Introduce a guard on increasing awaitDrain.
-  var increasedAwaitDrain = false;
   src.on('data', ondata);
   function ondata(chunk) {
     debug('ondata');
-    increasedAwaitDrain = false;
     var ret = dest.write(chunk);
-    if (false === ret && !increasedAwaitDrain) {
+    debug('dest.write', ret);
+    if (ret === false) {
       // If the user unpiped during `dest.write()`, it is possible
       // to get stuck in a permanently paused state if that write
       // also returned false.
@@ -654,7 +651,6 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
           !cleanedUp) {
         debug('false write response, pause', state.awaitDrain);
         state.awaitDrain++;
-        increasedAwaitDrain = true;
       }
       src.pause();
     }
@@ -830,7 +826,6 @@ function resume_(stream, state) {
   }
 
   state.resumeScheduled = false;
-  state.awaitDrain = 0;
   stream.emit('resume');
   flow(stream);
   if (state.flowing && !state.reading)
diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js
new file mode 100644
index 00000000000000..08269acfd3b015
--- /dev/null
+++ b/test/parallel/test-stream-pipe-manual-resume.js
@@ -0,0 +1,35 @@
+'use strict';
+const common = require('../common');
+const stream = require('stream');
+
+function test(throwCodeInbetween) {
+  // Check that a pipe does not stall if .read() is called unexpectedly
+  // (i.e. the stream is not resumed by the pipe).
+
+  const n = 1000;
+  let counter = n;
+  const rs = stream.Readable({
+    objectMode: true,
+    read: common.mustCallAtLeast(() => {
+      if (--counter >= 0)
+        rs.push({ counter });
+      else
+        rs.push(null);
+    }, n)
+  });
+
+  const ws = stream.Writable({
+    objectMode: true,
+    write: common.mustCall((data, enc, cb) => {
+      setImmediate(cb);
+    }, n)
+  });
+
+  setImmediate(() => throwCodeInbetween(rs, ws));
+
+  rs.pipe(ws);
+}
+
+test((rs) => rs.read());
+test((rs) => rs.resume());
+test(() => 0);
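
For readers who want to try the scenario outside of Node's test harness, below is a minimal standalone sketch adapted from the regression test above. It does not use the `common` module; the `written` counter, the final assertion, and the log message are additions for illustration, and the exact buffering thresholds depend on the writable's default highWaterMark. With the awaitDrain accounting from this patch in place, the pipe should deliver every chunk and finish instead of stalling.

'use strict';
// Standalone sketch (not part of the patch): a readable piped into a slow
// writable, with the source resumed manually while destination backpressure
// is in effect.
const stream = require('stream');
const assert = require('assert');

const n = 1000;
let counter = n;
let written = 0;

const rs = new stream.Readable({
  objectMode: true,
  read() {
    // Push `n` objects, then end the stream.
    if (--counter >= 0)
      this.push({ counter });
    else
      this.push(null);
  }
});

const ws = new stream.Writable({
  objectMode: true,
  write(data, enc, cb) {
    written++;
    // Complete each write asynchronously so dest.write() eventually returns
    // false and the pipe has to track awaitDrain.
    setImmediate(cb);
  }
});

// Interfere with the pipe's own flow control, as the regression test does.
setImmediate(() => rs.resume());

ws.on('finish', () => {
  // Every object should have reached the writable.
  assert.strictEqual(written, n);
  console.log('pipe completed without stalling');
});

rs.pipe(ws);

Before this change, the interaction between a manual resume()/read() call and the awaitDrain counter could leave the pipe permanently paused, which is the stall reported in the issues referenced above.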